rescuetime: get rid of kython, use cachew

Dima Gerasimov 2020-07-31 16:46:07 +01:00 committed by karlicoss
parent 6515d1430f
commit 975f9dd110


@@ -4,13 +4,13 @@ Rescuetime (activity tracking) data
 from pathlib import Path
 from datetime import datetime, timedelta
 from typing import NamedTuple, Dict, List, Set, Optional
+from typing import Sequence, Iterable
 
-from .common import get_files, LazyLogger
-from .error import Res, split_errors
+from .core import get_files, LazyLogger
+from .core.common import mcachew
+from .core.error import Res, split_errors
 
-# TODO get rid of it
-from kython import group_by_cmp # type: ignore
+import more_itertools
 
 from my.config import rescuetime as config
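
Aside, not part of the commit: mcachew, imported above, is HPI's wrapper around the cachew decorator (it degrades to a no-op when cachew isn't installed), and it is applied to entries() in the next hunk so the parsed entries get persisted instead of being re-read from every JSON export on each call. A minimal standalone sketch of the underlying cachew idea, with a made-up Visit type and demo() function:

from datetime import datetime
from typing import Iterator, NamedTuple

from cachew import cachew

class Visit(NamedTuple):
    dt: datetime
    activity: str

@cachew  # first call runs the body and stores the rows in a local sqlite cache; later calls replay them
def demo() -> Iterator[Visit]:
    yield Visit(dt=datetime(2020, 7, 31), activity='coding')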
@@ -18,51 +18,49 @@ from my.config import rescuetime as config
 log = LazyLogger(__package__, level='info')
 
 
-def inputs():
-    return get_files(config.export_path, '*.json')
+def inputs() -> Sequence[Path]:
+    return get_files(config.export_path)
 
 
-import my.config.repos.rescuexport.model as rescuexport
-Model = rescuexport.Model
+import my.config.repos.rescuexport.dal as dal
+DAL = dal.DAL
+Entry = dal.Entry
 
 
-# TODO cache?
-def get_model(last=0) -> Model:
-    return Model(inputs()[-last:])
-
-
-def _without_errors():
-    model = get_model()
-    it = model.iter_entries()
+# todo needs to be cumulative cache
+@mcachew
+def entries(files=inputs()) -> Iterable[Entry]:
+    dal = DAL(files)
+    it = dal.iter_entries()
     vit, eit = split_errors(it, ET=Exception)
-    # TODO FIXME handle eit somehow?
+    # todo handle errors, I guess initially I didn't because it's unclear how to easily group?
     yield from vit
+
+
+def groups(gap=timedelta(hours=3)):
+    vit = entries()
+    from more_itertools import split_when
+    yield from split_when(vit, lambda a, b: (b.dt - a.dt) > gap)
 
 
-def get_groups(gap=timedelta(hours=3)):
-    vit = _without_errors()
-    lit = list(vit) # TODO get rid of it...
-    return group_by_cmp(lit, lambda a, b: (b.dt - a.dt) <= gap, dist=1)
+def stats():
+    from .core import stat
+    return {
+        **stat(groups),
+        **stat(entries),
+    }
 
 
+# todo not sure if I want to keep these here? vvv
 def print_groups():
-    for gr in get_groups():
+    for gr in groups():
         print(f"{gr[0].dt}--{gr[-1].dt}")
 
-# TODO merged db?
-
-# TODO ok, it summarises my sleep intervals pretty well. I guess should adjust it for the fact I don't sleep during the day, and it would be ok!
-def check_backed_up(hours=24):
-    vit = _without_errors()
-    # TODO use some itertools stuff to get a window only?
-    last = list(vit)[-1]
-    latest_dt = last.dt
-    assert (datetime.now() - latest_dt) < timedelta(hours=hours)
 
 
+# TODO move this to backup checker??
 def fill_influxdb():
     from influxdb import InfluxDBClient # type: ignore
     client = InfluxDBClient()
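
Aside, not part of the commit: more_itertools.split_when, used in groups() above, starts a new chunk whenever the predicate over a consecutive pair of items is true; that is what replaces kython's group_by_cmp for splitting the entry stream on gaps longer than three hours. A tiny illustration with made-up numbers:

from more_itertools import split_when

times = [1, 2, 3, 10, 11, 30]
print(list(split_when(times, lambda a, b: b - a > 5)))
# -> [[1, 2, 3], [10, 11], [30]]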
@@ -70,7 +68,7 @@ def fill_influxdb():
     db = 'test'
     client.drop_database(db)
     client.create_database(db)
-    vit = _without_errors()
+    vit = entries()
     jsons = [{
         "measurement": 'phone',
         "tags": {},