use my.config instead of mycfg; minor cleanups and docstrings

Author: Dima Gerasimov, 2020-04-12 00:18:48 +01:00 (committed by karlicoss)
parent f31ff71e6f
commit e5b3a1e91e
37 changed files with 142 additions and 110 deletions
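
In short, modules stop reaching into the ad-hoc mycfg package (and its per-module namespaces) and instead import their own section from my.config. A minimal sketch of the before/after access pattern, taken from the bluemaestro module in the diff below (attribute names as they appear there; it assumes a checkout where my.config resolves):

    # before: config was reached through the shared mycfg package
    import mycfg
    export_path = mycfg.bluemaestro.export_path

    # after: each module imports its own section of my.config
    from my.config import bluemaestro as config
    export_path = config.export_path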


@@ -14,14 +14,14 @@ from typing import Any, Dict, Iterable, NamedTuple, Set
 from ..common import mcachew, LazyLogger, get_files
-import mycfg
+from my.config import bluemaestro as config
 logger = LazyLogger('bluemaestro', level='debug')
 def _get_exports():
-    return get_files(mycfg.bluemaestro.export_path, glob='*.db')
+    return get_files(config.export_path, glob='*.db')
 class Measurement(NamedTuple):
@@ -29,7 +29,7 @@ class Measurement(NamedTuple):
     temp: float
-@mcachew(cache_path=mycfg.bluemaestro.cache_path)
+@mcachew(cache_path=config.cache_path)
 def _iter_measurements(dbs) -> Iterable[Measurement]:
     # I guess we can affort keeping them in sorted order
     points: Set[Measurement] = set()


@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 """
 Blood tracking
 """
@@ -14,7 +13,7 @@ from ..error import Res, echain
 from kython.org import parse_org_date
-from mycfg import paths
+from my.config import blood as config
 import pandas as pd # type: ignore
@@ -52,7 +51,7 @@ def try_float(s: str) -> Optional[float]:
 def iter_gluc_keto_data() -> Iterable[Result]:
-    o = porg.Org.from_file(str(paths.blood.blood_log))
+    o = porg.Org.from_file(str(config.blood_log))
     tbl = o.xpath('//table')
     for l in tbl.lines:
         kets = l['ket'].strip()
@@ -71,7 +70,7 @@ def iter_gluc_keto_data() -> Iterable[Result]:
 def iter_tests_data() -> Iterable[Result]:
-    o = porg.Org.from_file(str(paths.blood.blood_tests_log))
+    o = porg.Org.from_file(str(config.blood_tests_log))
     tbl = o.xpath('//table')
     for d in tbl.lines:
         try:


@@ -9,7 +9,7 @@ from ..common import LazyLogger
 from ..error import Res
 from ..notes import orgmode
-from mycfg import weight as config
+from my.config import weight as config
 log = LazyLogger('my.body.weight')


@@ -4,12 +4,12 @@ Kobo e-ink reader: annotations and reading stats
 from typing import Callable, Union, List
-from mycfg import paths
-from mycfg.repos.kobuddy.src.kobuddy import *
+from my.config import kobo as config
+from my.config.repos.kobuddy.src.kobuddy import *
 # hmm, explicit imports make pylint a bit happier..
-from mycfg.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights
-set_databases(paths.kobuddy.export_dir)
+from my.config.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights
+set_databases(config.export_dir)
 # TODO maybe type over T?
 _Predicate = Callable[[str], bool]


@@ -8,7 +8,7 @@ import re
 from typing import Tuple, Iterator, List, Union
-from mycfg.holidays_data import HOLIDAYS_DATA
+from my.config.holidays_data import HOLIDAYS_DATA
 # pip3 install workalendar


@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+from .. import init
+from my.config import codeforces as config
 from datetime import datetime
 from typing import NamedTuple
 from pathlib import Path
@@ -31,8 +35,7 @@ Cmap = Dict[Cid, Contest]
 def get_contests() -> Cmap:
-    from mycfg import paths
-    last = max(get_files(paths.codeforces.export_path, 'allcontests*.json'))
+    last = max(get_files(config.export_path, 'allcontests*.json'))
     j = json.loads(last.read_text())
     d = {}
     for c in j['result']:
@@ -77,8 +80,7 @@ class Competition(NamedTuple):
 def iter_data() -> Iterator[Res[Competition]]:
     cmap = get_contests()
-    from mycfg import paths
-    last = max(get_files(paths.codeforces.export_path, 'codeforces*.json'))
+    last = max(get_files(config.export_path, 'codeforces*.json'))
     with wrap(json.loads(last.read_text())) as j:
         j['status'].ignore()


@@ -7,7 +7,7 @@ from datetime import datetime, timezone
 from typing import List, NamedTuple, Optional, Dict, Any, Iterator, Set
 from ..common import PathIsh, LazyLogger, mcachew
-from mycfg import commits as config
+from my.config import commits as config
 # pip3 install gitpython
 import git # type: ignore


@@ -2,6 +2,8 @@
 Github events and their metadata: comments/issues/pull requests
 """
+from .. import init
 from typing import Dict, List, Union, Any, NamedTuple, Tuple, Optional, Iterator, TypeVar, Set
 from datetime import datetime
 import json
@@ -14,8 +16,8 @@ from ..kython.kompress import CPath
 from ..common import get_files, mcachew
 from ..error import Res
-from mycfg import paths
-import mycfg.repos.ghexport.dal as ghexport
+from my.config import github as config
+import my.config.repos.ghexport.dal as ghexport
 logger = LazyLogger('my.github')
@@ -79,7 +81,7 @@ def _get_summary(e) -> Tuple[str, Optional[str], Optional[str]]:
 def get_dal():
-    sources = get_files(paths.github.export_dir, glob='*.json*')
+    sources = get_files(config.export_dir, glob='*.json*')
     sources = list(map(CPath, sources)) # TODO maybe move it to get_files? e.g. compressed=True arg?
     return ghexport.DAL(sources)
@@ -178,7 +180,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]:
     """
     Parses events from GDPR export (https://github.com/settings/admin)
     """
-    files = list(sorted(paths.github.gdpr_dir.glob('*.json')))
+    files = list(sorted(config.gdpr_dir.glob('*.json')))
     handler_map = {
         'schema' : None,
         'issue_events_': None, # eh, doesn't seem to have any useful bodies
@@ -215,7 +217,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]:
 # TODO hmm. not good, need to be lazier?...
-@mcachew(paths.github.cache_dir, hashf=lambda dal: dal.sources)
+@mcachew(config.cache_dir, hashf=lambda dal: dal.sources)
 def iter_backup_events(dal=get_dal()) -> Iterator[Event]:
     for d in dal.events():
         yield _parse_event(d)


@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+from .. import init
+from my.config import topcoder as config
 from datetime import datetime
 from typing import NamedTuple
 from pathlib import Path
@@ -15,8 +19,7 @@ from kython.konsume import zoom, wrap, ignore
 # TODO json type??
 def _get_latest() -> Dict:
-    from mycfg import paths
-    pp = max(get_files(paths.topcoder.export_path, glob='*.json'))
+    pp = max(get_files(config.export_path, glob='*.json'))
     return json.loads(pp.read_text())


@@ -16,7 +16,7 @@ import pytz
 from ..common import get_files, LazyLogger, cproperty, group_by_key, mcachew
-import mycfg
+from my.config import emfit as config
 logger = LazyLogger('my.emfit', level='info')
@@ -36,7 +36,7 @@ AWAKE = 4
 Sid = str
 # TODO use tz provider for that?
-_TZ = pytz.timezone(mycfg.emfit.tz)
+_TZ = pytz.timezone(config.tz)
 # TODO use common tz thing?
 def fromts(ts) -> datetime:
@@ -299,19 +299,19 @@ def dir_hash(path: Path):
     return mtimes
-@mcachew(cache_path=mycfg.emfit.cache_path, hashf=dir_hash, logger=logger)
+@mcachew(cache_path=config.cache_path, hashf=dir_hash, logger=logger)
 def iter_datas_cached(path: Path) -> Iterator[Emfit]:
     # TODO use get_files?
     for f in sorted(path.glob('*.json')):
         sid = f.stem
-        if sid in mycfg.emfit.excluded_sids:
+        if sid in config.excluded_sids:
             continue
         em = EmfitOld(sid=sid, jj=json.loads(f.read_text()))
         yield from Emfit.make(em)
-def iter_datas(path=mycfg.emfit.export_path) -> Iterator[Emfit]:
+def iter_datas(path=config.export_path) -> Iterator[Emfit]:
     yield from iter_datas_cached(path)


@@ -8,12 +8,12 @@ from typing import Iterator
 from .common import PathIsh
-import mycfg.repos.fbmessengerexport.dal as messenger
-from mycfg import paths
+import my.config.repos.fbmessengerexport.dal as messenger
+from my.config import fbmessenger as config
 def _dal() -> messenger.DAL:
-    return messenger.DAL(paths.fbmessenger.export_db)
+    return messenger.DAL(config.export_db)
 # TODO Result type?


@@ -5,7 +5,7 @@ Module for Feedbin RSS reader
 from .common import listify
 from ._rss import Subscription
-from mycfg import paths
+from my.config import feedbin as config
 import json
 from pathlib import Path
@@ -27,7 +27,8 @@ def parse_file(f: Path):
 def get_states() -> Dict[datetime, List[Subscription]]:
     res = {}
-    for f in sorted(Path(paths.feedbin.export_dir).glob('*.json')):
+    # TODO use get_files
+    for f in sorted(Path(config.export_dir).glob('*.json')):
         dts = f.stem.split('_')[-1]
         dt = isoparse(dts)
         subs = parse_file(f)


@@ -5,7 +5,7 @@ Module for Fedly RSS reader
 from .common import listify
 from ._rss import Subscription
-from mycfg import paths
+from my.config import feedly as config
 import json
 from pathlib import Path
@@ -30,7 +30,8 @@ def parse_file(f: Path):
 def get_states() -> Dict[datetime, List[Subscription]]:
     res = {}
-    for f in sorted(Path(paths.feedly.export_dir).glob('*.json')):
+    # TODO use get_files
+    for f in sorted(Path(config.export_dir).glob('*.json')):
         dts = f.stem.split('_')[-1]
         dt = datetime.strptime(dts, '%Y%m%d%H%M%S')
         dt = pytz.utc.localize(dt)


@@ -1,4 +1,7 @@
-#!/usr/bin/env python3
+'''
+Foursquare/Swarm checkins
+'''
 from datetime import datetime, timezone, timedelta
 from itertools import chain
 from pathlib import Path
@@ -9,14 +12,14 @@ from pathlib import Path
 # TODO pytz for timezone???
 from .common import get_files, LazyLogger
+from my.config import foursquare as config
-logger = LazyLogger('my.foursquare')
+logger = LazyLogger(__package__)
 def _get_exports() -> List[Path]:
-    from mycfg import paths
-    return get_files(paths.foursquare.export_path, '*.json')
+    return get_files(config.export_path, '*.json')
 class Checkin:


@@ -7,12 +7,12 @@ from typing import NamedTuple, Optional, List, Iterator
 from .common import group_by_key, PathIsh, get_files
-from mycfg import paths
-import mycfg.repos.instapexport.dal as dal
+from my.config import instapaper as config
+import my.config.repos.instapexport.dal as dal
 def _get_files():
-    return get_files(paths.instapaper.export_path, glob='*.json')
+    return get_files(config.export_path, glob='*.json')
 def get_dal() -> dal.DAL:


@@ -7,10 +7,10 @@ from pathlib import Path
 import logging
 import pytz
-from mycfg import paths
-BDIR = paths.jawbone.export_dir
+from my.config import jawbone as config
+BDIR = config.export_dir
 PHASES_FILE = BDIR / 'phases.json'
 SLEEPS_FILE = BDIR / 'sleeps.json'
 GRAPHS_DIR = BDIR / 'graphs'


@@ -87,9 +87,9 @@ def iter_useful(data_file: str):
 # TODO <<< hmm. these files do contain deep and light sleep??
 # also steps stats??
-from mycfg import paths
-p = paths.jawbone.export_dir / 'old_csv'
+from my.config import jawbone as config
+p = config.export_dir / 'old_csv'
 # TODO with_my?
 files = [
     p / "2015.csv",


@@ -1,4 +1,9 @@
-#!/usr/bin/env python3
+'''
+Last.fm scrobbles
+'''
+from .. import init
 from functools import lru_cache
 from typing import NamedTuple, Dict, Any
 from datetime import datetime
@@ -7,7 +12,7 @@ import json
 import pytz
-from mycfg import paths
+from my.config import lastfm as config
 # TODO Json type?
 # TODO memoised properties?
@@ -43,7 +48,8 @@ class Scrobble(NamedTuple):
 # TODO memoise...?
 # TODO watch out, if we keep the app running it might expire
 def _iter_scrobbles():
-    last = max(Path(paths.lastfm.export_path).glob('*.json'))
+    # TODO use get_files
+    last = max(Path(config.export_path).glob('*.json'))
     # TODO mm, no timezone? hopefuly it's UTC
     j = json.loads(last.read_text())


@@ -31,8 +31,8 @@ logger = LazyLogger(__package__)
 def cache_path(*args, **kwargs):
-    from mycfg import paths
-    return paths.location.cache_path
+    from my.config import location as config
+    return config.cache_path
 Tag = Optional[str]
@@ -51,9 +51,9 @@ def _iter_locations_fo(fo, start, stop) -> Iterator[Location]:
     errors = 0
     try:
-        from mycfg.locations import LOCATIONS as known_locations
+        from my.config.locations import LOCATIONS as known_locations
     except ModuleNotFoundError as e:
-        name = 'mycfg.locations'
+        name = 'my.config.locations'
         if e.name != name:
             raise e
         logger.warning("'%s' isn't found. setting known_locations to empty list", name)
@@ -118,7 +118,6 @@ def _iter_locations(path: Path, start=0, stop=None) -> Iterator[Location]:
 def iter_locations(**kwargs) -> Iterator[Location]:
-    from mycfg import paths
     # TODO need to include older data
     last_takeout = get_last_takeout(path=_LOCATION_JSON)


@@ -1,6 +1,7 @@
 """
-Module for [[https://play.google.com/store/apps/details?id=io.github.hidroh.materialistic][Materialistic]] app for Hackernews
+[[https://play.google.com/store/apps/details?id=io.github.hidroh.materialistic][Materialistic]] app for Hackernews
 """
+from . import init
 from datetime import datetime
 from typing import Any, Dict, Iterator, NamedTuple
@@ -9,7 +10,7 @@ import pytz
 import dataset # type: ignore
 from .common import get_files
-from mycfg import paths
+from my.config import materialistic as config
 Row = Dict[str, Any]
@@ -41,7 +42,7 @@ class Saved(NamedTuple):
 def _last_export():
-    return max(get_files(paths.materialistic.export_path, glob='**/*.db'))
+    return max(get_files(config.export_path, glob='**/*.db'))
 def raw() -> Iterator[Row]:


@@ -1,4 +1,7 @@
 #!/usr/bin/env python3
+from .. import init
 import csv
 import json
 from datetime import datetime
@@ -6,10 +9,10 @@ from typing import Iterator, List, NamedTuple
 from ..common import get_files
-from mycfg import paths
+from my.config import imdb as config
 def _get_last():
-    return max(get_files(paths.imdb.export_path, glob='*.csv'))
+    return max(get_files(config.export_path, glob='*.csv'))
 class Movie(NamedTuple):


@@ -1,4 +1,4 @@
 # TODO maybe, reuse mycfg_template here?
-class paths:
-    pass
+class stub:
+    key = 'value'


@@ -8,7 +8,7 @@ from pathlib import Path
 from ..common import PathIsh
-from mycfg import orgmode as config
+from my.config import orgmode as config
 from porg import Org


@@ -1,4 +1,10 @@
 #!/usr/bin/env python3
+'''
+PDF annotations on your filesystem
+'''
+from . import init
 from concurrent.futures import ProcessPoolExecutor
 from datetime import datetime
 import re
@@ -14,8 +20,8 @@ from .common import mcachew, group_by_key
 from .error import Res, split_errors
 # path to pdfannots (https://github.com/0xabu/pdfannots)
-import mycfg.repos.pdfannots.pdfannots as pdfannots
-from mycfg import paths
+import my.config.repos.pdfannots.pdfannots as pdfannots
+from my.config import pdfs as config
 def get_logger():
@@ -24,12 +30,12 @@ def get_logger():
 def is_ignored(p: Path) -> bool:
     # ignore some extremely heavy files
-    return paths.pdfs.is_ignored(p)
+    return config.is_ignored(p)
 def candidates(roots=None) -> Iterator[Path]:
     if roots is None:
-        roots = paths.pdfs.roots
+        roots = config.roots
     for r in roots:
         for p in Path(r).rglob('*.pdf'):


@@ -14,7 +14,7 @@ from geopy.geocoders import Nominatim # type: ignore
 from ..common import LazyLogger, mcachew, fastermime
 from ..error import Res
-from mycfg import photos as config
+from my.config import photos as config
 log = LazyLogger('my.photos')


@@ -1,11 +1,12 @@
 """
-Module for pinboard.in bookmarks
+pinboard.in bookmarks
 """
+from . import init
 from .common import get_files
-from mycfg.repos.pinbexport import dal as pinbexport
-from mycfg import paths
+from my.config.repos.pinbexport import dal as pinbexport
+from my.config import pinboard as config
 # TODO would be nice to make interfaces available for mypy...
 Bookmark = pinbexport.Bookmark
@@ -13,7 +14,7 @@ Bookmark = pinbexport.Bookmark
 # yep; clearly looks that the purpose of my. package is to wire files to DAL implicitly; otherwise it's just passtrhough.
 def dal():
-    sources = get_files(paths.pinbexport.export_dir, glob='*.json')
+    sources = get_files(config.export_dir, glob='*.json')
     model = pinbexport.DAL(sources)
     return model


@@ -4,7 +4,7 @@ from typing import NamedTuple, Sequence, Any
 from .common import get_files
-from mycfg import pocket as config
+from my.config import pocket as config
 def _files():


@@ -4,12 +4,12 @@ from typing import NamedTuple
 from datetime import datetime
 import pytz
-from mycfg.repos.goodrexport import dal as goodrexport
-from mycfg import paths
+from my.config.repos.goodrexport import dal as goodrexport
+from my.config import goodreads as config
 def get_model():
-    sources = list(sorted(paths.goodrexport.export_dir.glob('*.xml')))
+    sources = list(sorted(config.export_dir.glob('*.xml')))
     model = goodrexport.DAL(sources)
     return model


@@ -1,7 +1,7 @@
-#!/usr/bin/env python3
 """
 Module for Reddit data: saved items/comments/upvotes etc
 """
+from . import init
 from pathlib import Path
 from typing import List, Sequence, Mapping, Iterator
@@ -9,19 +9,19 @@ from typing import List, Sequence, Mapping, Iterator
 from .kython.kompress import CPath
 from .common import mcachew, get_files, LazyLogger, make_dict
-from mycfg import paths
-import mycfg.repos.rexport.dal as rexport
+from my.config import reddit as config
+import my.config.repos.rexport.dal as rexport
 def get_sources() -> Sequence[Path]:
     # TODO use zstd?
     # TODO maybe add assert to get_files? (and allow to suppress it)
-    files = get_files(paths.rexport.export_dir, glob='*.json.xz')
+    files = get_files(config.export_dir, glob='*.json.xz')
     res = list(map(CPath, files)); assert len(res) > 0
     return tuple(res)
-logger = LazyLogger('my.reddit', level='debug')
+logger = LazyLogger(__package__, level='debug')
 Sid = rexport.Sid


@@ -1,3 +1,7 @@
+'''
+Rescuetime (activity tracking) data
+'''
 from pathlib import Path
 from datetime import datetime, timedelta
 from typing import NamedTuple, Dict, List, Set, Optional
@@ -8,18 +12,17 @@ from .error import Res, split_errors
 # TODO get rid of it
 from kython import group_by_cmp # type: ignore
-from mycfg import paths
-log = LazyLogger('my.rescuetime', level='info')
+from my.config import rescuetime as config
+log = LazyLogger(__package__, level='info')
 def _get_exports() -> List[Path]:
-    from mycfg import paths
-    return get_files(paths.rescuetime.export_path, '*.json')
+    return get_files(config.export_path, '*.json')
-import mycfg.repos.rescuexport.model as rescuexport
+import my.config.repos.rescuexport.model as rescuexport
 Model = rescuexport.Model


@@ -11,7 +11,7 @@ from datetime import datetime
 from .common import LazyLogger, get_files, group_by_key, cproperty, make_dict
 from .kython.kompress import open as kopen
-from mycfg import rtm as config
+from my.config import rtm as config
 import icalendar # type: ignore


@@ -2,6 +2,7 @@
 Phone calls and SMS messages
 """
 # TODO extract SMS as well? I barely use them though..
+from . import init
 from datetime import datetime
 from pathlib import Path
@@ -12,7 +13,7 @@ from lxml import etree # type: ignore
 from .common import get_files
-from mycfg import paths
+from my.config import smscalls as config
 class Call(NamedTuple):
@@ -40,7 +41,7 @@ def _extract_calls(path: Path) -> Iterator[Call]:
 def calls() -> Iterator[Call]:
-    files = get_files(paths.smscalls.export_path, glob='calls-*.xml')
+    files = get_files(config.export_path, glob='calls-*.xml')
     # TODO always replacing with the latter is good, we get better contact names??
     emitted: Set[datetime] = set()


@@ -1,7 +1,11 @@
-import mycfg.repos.stexport.model as stexport
-from mycfg import paths
+'''
+Stackexchange data
+'''
+import my.config.repos.stexport.model as stexport
+from my.config import stackexchange as config
 def get_data():
-    sources = [max(paths.stexport.export_dir.glob('*.json'))]
+    sources = [max(config.export_dir.glob('*.json'))]
     return stexport.Model(sources).site_model('stackoverflow')


@@ -3,7 +3,7 @@ from typing import Optional
 from .common import get_files
-from mycfg import paths
+from my.config import google as config
 from .kython.kompress import kopen
@@ -12,7 +12,7 @@ def get_last_takeout(*, path: Optional[str]=None) -> Path:
     Ok, sometimes google splits takeout into two zip archives
     I guess I could detect it (they've got 001/002 etc suffixes), but fornow that works fine..
     """
-    for takeout in reversed(get_files(paths.google.takeout_path, glob='*.zip')):
+    for takeout in reversed(get_files(config.takeout_path, glob='*.zip')):
         if path is None:
             return takeout
         else:


@@ -4,12 +4,7 @@ Module for Twitter (uses official twitter archive export)
 See https://help.twitter.com/en/managing-your-account/how-to-download-your-twitter-archive
 """
-# TODO remove these
-"""
-Expects path to be set
-- via ~configure~ (before calling anything else)
-- or in ~mycfg.twitter.export_path~
-"""
+from . import init
 from datetime import date, datetime
@@ -24,9 +19,10 @@ from .common import PathIsh, get_files, LazyLogger
 from .kython import kompress
-logger = LazyLogger('my.twitter')
+logger = LazyLogger(__package__)
+# TODO get rid of this?
 _export_path: Optional[Path] = None
 def configure(*, export_path: Optional[PathIsh]=None) -> None:
     if export_path is not None:
@@ -37,9 +33,9 @@ def configure(*, export_path: Optional[PathIsh]=None) -> None:
 def _get_export() -> Path:
     export_path = _export_path
     if export_path is None:
-        # fallback to mycfg
-        from mycfg import paths
-        export_path = paths.twitter.export_path
+        # fallback
+        from my.config import twitter as config
+        export_path = config.export_path
     return max(get_files(export_path, '*.zip'))


@@ -2,7 +2,7 @@ from datetime import datetime
 import json
 from typing import NamedTuple, Iterator, Dict, Union, Sequence, Optional
-from mycfg import paths
+from my.config import vk as config
 class Favorite(NamedTuple):
@@ -53,7 +53,7 @@ def parse_fav(j: Dict) -> Favorite:
 def _iter_favs() -> Iterator[Res]:
-    jj = json.loads(paths.vk.favs_file.read_text())
+    jj = json.loads(config.favs_file.read_text())
     for j in jj:
         try:
             yield parse_fav(j)


@@ -6,12 +6,13 @@ skip_missing_interpreters = True
 # TODO ugh. unclear how to reuse setup.cfg deps in tox
 [testenv]
 passenv = CI CI_* CIRCLE*
-setenv = MY_CONFIG = mycfg_template
+# deliberately set to nonexistent pathe to check the fallback logic
+setenv = MY_CONFIG = nonexistent
 commands =
     pip install -e .
     # TODO ??
    # python -m pytest {posargs}
-    python3 -c 'import my.init; import my.config; print(my.config.__path__)'
+    python3 -c 'import my.init; from my.config import stub as config; print(config.key)'
     python3 -c 'import my.init; import my.config; import my.config.repos' # shouldn't fail at least
     # TODO run demo.py? just make sure with_my is a bit cleverer?
     # TODO e.g. under CI, rely on installing
@@ -22,7 +23,7 @@ commands =
 changedir = {toxworkdir}/{envname}/../..
 # TODO not sure?? also reuse installed my. instead on installing in demo.py?
 skip_install = true
-# TODO need to keep full mycfg.py as example? to dummy run CI
+# TODO need to keep full config as example? to dummy run CI
 commands = ./demo.py
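
Note: the my/config stub changed earlier in the diff (class stub with key = 'value') is exactly what this tox check exercises. With MY_CONFIG deliberately pointed at a nonexistent path, the one-liner boils down to the following and should print 'value' once the fallback to the bundled stub kicks in (a sketch; it assumes my.init wires up that fallback, per the comment in the tox hunk):

    import my.init                          # sets up config resolution
    from my.config import stub as config    # falls back to the stub shipped with the repo
    print(config.key)                       # prints 'value', as defined in the stub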