Prettify rescuetime provider, interact with export module
This commit is contained in:
parent
ab5fc16df7
commit
00007dabe6
4 changed files with 71 additions and 137 deletions
71
my/rescuetime.py
Normal file
71
my/rescuetime.py
Normal file
|
@ -0,0 +1,71 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
from typing import NamedTuple, Dict, List, Set, Optional
|
||||
from functools import lru_cache
|
||||
|
||||
from .common import get_files
|
||||
|
||||
# TODO get rid of it
|
||||
from kython import group_by_cmp # type: ignore
|
||||
|
||||
from my_configuration import paths
|
||||
|
||||
|
||||
def get_logger():
    """Return the logger for the my.rescuetime module."""
    logger = logging.getLogger("my.rescuetime")
    return logger
|
||||
|
||||
|
||||
def _get_exports() -> List[Path]:
    """Return paths of the rescuetime JSON export files.

    NOTE(review): uses the module-level `from my_configuration import paths`;
    the previous function-local re-import of the same name was redundant and
    has been dropped.
    """
    return get_files(paths.rescuetime.export_path, '*.json')
|
||||
|
||||
|
||||
import my_configuration.repos.rescuexport.model as rescuexport
|
||||
Model = rescuexport.Model
|
||||
|
||||
|
||||
# TODO cache?
def get_model(last=0) -> Model:
    """Build a Model over the export files.

    last=N (N > 0) restricts it to the N most recent files; the default
    last=0 keeps everything, since slicing with [-0:] is a full slice.
    """
    exports = _get_exports()[-last:]
    return Model(exports)
|
||||
|
||||
|
||||
def get_groups(gap=timedelta(hours=3)):
    """Split entries into groups of activity separated by more than `gap`."""
    entries = list(get_model().iter_entries())  # TODO get rid of the materialisation...
    close_enough = lambda prev, cur: (cur.dt - prev.dt) <= gap
    return group_by_cmp(entries, close_enough, dist=1)
|
||||
|
||||
|
||||
def print_groups():
    """Print the start/end timestamp of every activity group."""
    for group in get_groups():
        start, end = group[0].dt, group[-1].dt
        print(f"{start}--{end}")
|
||||
# TODO merged db?
|
||||
# TODO ok, it summarises my sleep intervals pretty well. I guess should adjust it for the fact I don't sleep during the day, and it would be ok!
|
||||
|
||||
|
||||
def check_backed_up():
    """Assert that the newest export contains an entry less than a day old."""
    from collections import deque

    model = get_model(last=1)
    # keep only the final entry instead of materialising the whole entry list
    last = deque(model.iter_entries(), maxlen=1)[-1]
    latest_dt = last.dt

    # NOTE(review): entry timestamps look naive/local (see the old provider's
    # "appears to be local time" comment) and are compared against naive
    # datetime.now() — confirm timezone handling.
    assert (datetime.now() - latest_dt) < timedelta(days=1)
    # TODO move this to backup checker??
|
||||
|
||||
|
||||
def fill_influxdb():
    """Recreate the 'test' influxdb database and write every entry into it."""
    from influxdb import InfluxDBClient  # type: ignore
    client = InfluxDBClient()
    # client.delete_series(database='lastfm', measurement='phone')
    db = 'test'
    client.drop_database(db)
    client.create_database(db)
    model = get_model()
    points = []
    for e in model.iter_entries():
        points.append({
            "measurement": 'phone',
            "tags": {},
            "time": str(e.dt),
            "fields": {"name": e.activity},
        })
    client.write_points(points, database=db)  # TODO??
|
||||
|
|
@ -1,103 +0,0 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from typing import NamedTuple, Dict, List, Set, Optional
|
||||
from functools import lru_cache
|
||||
|
||||
from ..common import get_files
|
||||
|
||||
|
||||
from kython import JSONType, fget, group_by_cmp
|
||||
|
||||
|
||||
def get_logger():
    """Return the logger used by this provider."""
    logger = logging.getLogger("rescuetime-provider")
    return logger
|
||||
|
||||
_PATH = Path("/L/backups/rescuetime")  # directory holding the rescuetime JSON export snapshots
|
||||
|
||||
def try_load(fp: Path):
    """Parse JSON from fp.

    Returns the parsed object, or None (after logging a warning) when the
    file is a truncated/corrupted export; any other error propagates.
    """
    logger = get_logger()
    try:
        return json.loads(fp.read_text())
    except json.JSONDecodeError as e:
        # truncated/empty exports fail with "Expecting value"; anything else
        # is unexpected and should propagate with its original traceback
        if 'Expecting value' in str(e):
            logger.warning(f"Corrupted: {fp}")
            return None
        raise
|
||||
|
||||
|
||||
_DT_FMT = "%Y-%m-%dT%H:%M:%S"  # timestamp format of export rows; carries no timezone info
|
||||
|
||||
class Entry(NamedTuple):
    """A single rescuetime activity record.

    NOTE(review): dt appears to be local time (per the original TODO) — confirm.
    """
    dt: datetime
    duration_s: int
    activity: str

    @staticmethod
    def from_row(row: List):
        """Build an Entry from a raw export row (positional columns)."""
        # export column layout: 0 = timestamp, 1 = duration, 3 = activity name
        raw_dt, duration, activity = row[0], row[1], row[3]
        # TODO utc??
        parsed = datetime.strptime(raw_dt, _DT_FMT)
        return Entry(dt=parsed, duration_s=duration, activity=activity)
|
||||
|
||||
|
||||
# TODO hmm ok, these are going to need to be properly merged?
@lru_cache(1)
def get_rescuetime(latest: Optional[int]=None):
    """Load, deduplicate and chronologically sort entries from the exports.

    latest: read only the N most recent export files; None/0 means all of
    them (slicing with [-0:] keeps the whole list).
    """
    logger = get_logger()
    if latest is None:
        latest = 0

    entries: Set[Entry] = set()

    # pylint: disable=invalid-unary-operand-type
    # sorted() already returns a list — the extra list() wrapper was redundant
    for fp in sorted(_PATH.glob('*.json'))[-latest:]:
        j = try_load(fp)
        if j is None:
            continue

        cols = j['row_headers']  # unused, but also asserts the expected schema
        seen = 0
        total = 0
        for row in j['rows']:
            e = Entry.from_row(row)
            total += 1
            if e in entries:
                seen += 1
            else:
                entries.add(e)
        # duplicate stats per file; was a stray print(), routed to the logger
        logger.info(f"{fp}: {seen}/{total}")
    # plain attribute key instead of kython's fget helper — same ordering
    res = sorted(entries, key=lambda e: e.dt)
    return res
|
||||
|
||||
|
||||
def get_groups(gap=timedelta(hours=3)):
    """Cluster entries: a new group starts when consecutive entries are more
    than `gap` apart."""
    close_enough = lambda prev, cur: (cur.dt - prev.dt) <= gap
    return group_by_cmp(get_rescuetime(), close_enough, dist=1)
|
||||
|
||||
|
||||
|
||||
def fill_influxdb():
    """Recreate the 'test' influxdb database and write all entries into it."""
    from influxdb import InfluxDBClient  # type: ignore
    client = InfluxDBClient()
    # client.delete_series(database='lastfm', measurement='phone')
    db = 'test'
    client.drop_database(db)
    client.create_database(db)
    points = []
    for e in get_rescuetime():
        points.append({
            "measurement": 'phone',
            "tags": {},
            "time": str(e.dt),
            "fields": {"name": e.activity},
        })
    client.write_points(points, database=db)  # TODO??
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
from kython.klogging import setup_logzero
|
||||
|
||||
from . import get_logger, get_groups, get_rescuetime, fill_influxdb
|
||||
|
||||
def main():
    """Entry point: configure logging, then push all entries into influxdb."""
    logger = get_logger()
    setup_logzero(logger)

    # for gr in get_groups():
    #     print(f"{gr[0].dt}--{gr[-1].dt}")
    # for e in get_rescuetime(latest=2):
    #     print(e)
    fill_influxdb()
|
||||
|
||||
# TODO merged db?
|
||||
# TODO ok, it summarises my sleep intervals pretty well. I guess should adjust it for the fact I don't sleep during the day, and it would be ok!
|
||||
|
||||
# run directly (python -m ...) to fill influxdb
if __name__ == '__main__':
    main()
|
|
@ -1,15 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from rescuetime import get_rescuetime
|
||||
|
||||
|
||||
def main():
    """Fail with AssertionError unless the newest export has an entry from
    the last 24 hours."""
    entries = get_rescuetime(latest=1)
    latest_dt = entries[-1].dt

    assert (datetime.now() - latest_dt) < timedelta(days=1)


if __name__ == '__main__':
    main()
|
Loading…
Add table
Reference in a new issue