From e5b3a1e91e7b0e26635d68634e834a8f9b8cdb21 Mon Sep 17 00:00:00 2001 From: Dima Gerasimov Date: Sun, 12 Apr 2020 00:18:48 +0100 Subject: [PATCH] use my.config instead of mycfg; minor cleanups and docstrings --- my/bluemaestro/__init__.py | 6 +++--- my/body/blood.py | 7 +++---- my/body/weight.py | 2 +- my/books/kobo.py | 8 ++++---- my/calendar/holidays.py | 2 +- my/coding/codeforces.py | 10 ++++++---- my/coding/commits.py | 2 +- my/coding/github.py | 12 +++++++----- my/coding/topcoder.py | 7 +++++-- my/emfit/__init__.py | 10 +++++----- my/fbmessenger.py | 6 +++--- my/feedbin.py | 5 +++-- my/feedly.py | 5 +++-- my/foursquare.py | 11 +++++++---- my/instapaper.py | 6 +++--- my/jawbone/__init__.py | 4 ++-- my/jawbone/plots.py | 4 ++-- my/lastfm/__init__.py | 12 +++++++++--- my/location/takeout.py | 9 ++++----- my/materialistic.py | 7 ++++--- my/media/imdb.py | 7 +++++-- my/mycfg_stub/__init__.py | 4 ++-- my/notes/orgmode.py | 2 +- my/pdfs.py | 14 ++++++++++---- my/photos/__init__.py | 2 +- my/pinboard.py | 9 +++++---- my/pocket.py | 2 +- my/reading/goodreads.py | 6 +++--- my/reddit.py | 10 +++++----- my/rescuetime.py | 13 ++++++++----- my/rtm.py | 2 +- my/smscalls.py | 5 +++-- my/stackexchange.py | 10 +++++++--- my/takeout.py | 4 ++-- my/twitter.py | 16 ++++++---------- my/vk.py | 4 ++-- tox.ini | 7 ++++--- 37 files changed, 142 insertions(+), 110 deletions(-) diff --git a/my/bluemaestro/__init__.py b/my/bluemaestro/__init__.py index f5c31e9..f4ac714 100755 --- a/my/bluemaestro/__init__.py +++ b/my/bluemaestro/__init__.py @@ -14,14 +14,14 @@ from typing import Any, Dict, Iterable, NamedTuple, Set from ..common import mcachew, LazyLogger, get_files -import mycfg +from my.config import bluemaestro as config logger = LazyLogger('bluemaestro', level='debug') def _get_exports(): - return get_files(mycfg.bluemaestro.export_path, glob='*.db') + return get_files(config.export_path, glob='*.db') class Measurement(NamedTuple): @@ -29,7 +29,7 @@ class Measurement(NamedTuple): 
temp: float -@mcachew(cache_path=mycfg.bluemaestro.cache_path) +@mcachew(cache_path=config.cache_path) def _iter_measurements(dbs) -> Iterable[Measurement]: # I guess we can affort keeping them in sorted order points: Set[Measurement] = set() diff --git a/my/body/blood.py b/my/body/blood.py index c9787cb..9a614d3 100755 --- a/my/body/blood.py +++ b/my/body/blood.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 """ Blood tracking """ @@ -14,7 +13,7 @@ from ..error import Res, echain from kython.org import parse_org_date -from mycfg import paths +from my.config import blood as config import pandas as pd # type: ignore @@ -52,7 +51,7 @@ def try_float(s: str) -> Optional[float]: def iter_gluc_keto_data() -> Iterable[Result]: - o = porg.Org.from_file(str(paths.blood.blood_log)) + o = porg.Org.from_file(str(config.blood_log)) tbl = o.xpath('//table') for l in tbl.lines: kets = l['ket'].strip() @@ -71,7 +70,7 @@ def iter_gluc_keto_data() -> Iterable[Result]: def iter_tests_data() -> Iterable[Result]: - o = porg.Org.from_file(str(paths.blood.blood_tests_log)) + o = porg.Org.from_file(str(config.blood_tests_log)) tbl = o.xpath('//table') for d in tbl.lines: try: diff --git a/my/body/weight.py b/my/body/weight.py index 7dc3a18..f18f449 100644 --- a/my/body/weight.py +++ b/my/body/weight.py @@ -9,7 +9,7 @@ from ..common import LazyLogger from ..error import Res from ..notes import orgmode -from mycfg import weight as config +from my.config import weight as config log = LazyLogger('my.body.weight') diff --git a/my/books/kobo.py b/my/books/kobo.py index 7b70a79..6235556 100644 --- a/my/books/kobo.py +++ b/my/books/kobo.py @@ -4,12 +4,12 @@ Kobo e-ink reader: annotations and reading stats from typing import Callable, Union, List -from mycfg import paths -from mycfg.repos.kobuddy.src.kobuddy import * +from my.config import kobo as config +from my.config.repos.kobuddy.src.kobuddy import * # hmm, explicit imports make pylint a bit happier.. 
-from mycfg.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights +from my.config.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights -set_databases(paths.kobuddy.export_dir) +set_databases(config.export_dir) # TODO maybe type over T? _Predicate = Callable[[str], bool] diff --git a/my/calendar/holidays.py b/my/calendar/holidays.py index a837b4a..c5cf9d9 100644 --- a/my/calendar/holidays.py +++ b/my/calendar/holidays.py @@ -8,7 +8,7 @@ import re from typing import Tuple, Iterator, List, Union -from mycfg.holidays_data import HOLIDAYS_DATA +from my.config.holidays_data import HOLIDAYS_DATA # pip3 install workalendar diff --git a/my/coding/codeforces.py b/my/coding/codeforces.py index 16cb35b..fbbf586 100644 --- a/my/coding/codeforces.py +++ b/my/coding/codeforces.py @@ -1,4 +1,8 @@ #!/usr/bin/env python3 +from .. import init + +from my.config import codeforces as config + from datetime import datetime from typing import NamedTuple from pathlib import Path @@ -31,8 +35,7 @@ Cmap = Dict[Cid, Contest] def get_contests() -> Cmap: - from mycfg import paths - last = max(get_files(paths.codeforces.export_path, 'allcontests*.json')) + last = max(get_files(config.export_path, 'allcontests*.json')) j = json.loads(last.read_text()) d = {} for c in j['result']: @@ -77,8 +80,7 @@ class Competition(NamedTuple): def iter_data() -> Iterator[Res[Competition]]: cmap = get_contests() - from mycfg import paths - last = max(get_files(paths.codeforces.export_path, 'codeforces*.json')) + last = max(get_files(config.export_path, 'codeforces*.json')) with wrap(json.loads(last.read_text())) as j: j['status'].ignore() diff --git a/my/coding/commits.py b/my/coding/commits.py index 6e7a822..7c15f40 100644 --- a/my/coding/commits.py +++ b/my/coding/commits.py @@ -7,7 +7,7 @@ from datetime import datetime, timezone from typing import List, NamedTuple, Optional, Dict, Any, Iterator, Set from ..common import PathIsh, LazyLogger, mcachew -from mycfg import 
commits as config +from my.config import commits as config # pip3 install gitpython import git # type: ignore diff --git a/my/coding/github.py b/my/coding/github.py index 3b15e33..735ee93 100644 --- a/my/coding/github.py +++ b/my/coding/github.py @@ -2,6 +2,8 @@ Github events and their metadata: comments/issues/pull requests """ +from .. import init + from typing import Dict, List, Union, Any, NamedTuple, Tuple, Optional, Iterator, TypeVar, Set from datetime import datetime import json @@ -14,8 +16,8 @@ from ..kython.kompress import CPath from ..common import get_files, mcachew from ..error import Res -from mycfg import paths -import mycfg.repos.ghexport.dal as ghexport +from my.config import github as config +import my.config.repos.ghexport.dal as ghexport logger = LazyLogger('my.github') @@ -79,7 +81,7 @@ def _get_summary(e) -> Tuple[str, Optional[str], Optional[str]]: def get_dal(): - sources = get_files(paths.github.export_dir, glob='*.json*') + sources = get_files(config.export_dir, glob='*.json*') sources = list(map(CPath, sources)) # TODO maybe move it to get_files? e.g. compressed=True arg? return ghexport.DAL(sources) @@ -178,7 +180,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]: """ Parses events from GDPR export (https://github.com/settings/admin) """ - files = list(sorted(paths.github.gdpr_dir.glob('*.json'))) + files = list(sorted(config.gdpr_dir.glob('*.json'))) handler_map = { 'schema' : None, 'issue_events_': None, # eh, doesn't seem to have any useful bodies @@ -215,7 +217,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]: # TODO hmm. not good, need to be lazier?... 
-@mcachew(paths.github.cache_dir, hashf=lambda dal: dal.sources) +@mcachew(config.cache_dir, hashf=lambda dal: dal.sources) def iter_backup_events(dal=get_dal()) -> Iterator[Event]: for d in dal.events(): yield _parse_event(d) diff --git a/my/coding/topcoder.py b/my/coding/topcoder.py index fa90af7..de98114 100644 --- a/my/coding/topcoder.py +++ b/my/coding/topcoder.py @@ -1,4 +1,8 @@ #!/usr/bin/env python3 +from .. import init + +from my.config import topcoder as config + from datetime import datetime from typing import NamedTuple from pathlib import Path @@ -15,8 +19,7 @@ from kython.konsume import zoom, wrap, ignore # TODO json type?? def _get_latest() -> Dict: - from mycfg import paths - pp = max(get_files(paths.topcoder.export_path, glob='*.json')) + pp = max(get_files(config.export_path, glob='*.json')) return json.loads(pp.read_text()) diff --git a/my/emfit/__init__.py b/my/emfit/__init__.py index 988ebce..b245824 100755 --- a/my/emfit/__init__.py +++ b/my/emfit/__init__.py @@ -16,7 +16,7 @@ import pytz from ..common import get_files, LazyLogger, cproperty, group_by_key, mcachew -import mycfg +from my.config import emfit as config logger = LazyLogger('my.emfit', level='info') @@ -36,7 +36,7 @@ AWAKE = 4 Sid = str # TODO use tz provider for that? -_TZ = pytz.timezone(mycfg.emfit.tz) +_TZ = pytz.timezone(config.tz) # TODO use common tz thing? def fromts(ts) -> datetime: @@ -299,19 +299,19 @@ def dir_hash(path: Path): return mtimes -@mcachew(cache_path=mycfg.emfit.cache_path, hashf=dir_hash, logger=logger) +@mcachew(cache_path=config.cache_path, hashf=dir_hash, logger=logger) def iter_datas_cached(path: Path) -> Iterator[Emfit]: # TODO use get_files? 
for f in sorted(path.glob('*.json')): sid = f.stem - if sid in mycfg.emfit.excluded_sids: + if sid in config.excluded_sids: continue em = EmfitOld(sid=sid, jj=json.loads(f.read_text())) yield from Emfit.make(em) -def iter_datas(path=mycfg.emfit.export_path) -> Iterator[Emfit]: +def iter_datas(path=config.export_path) -> Iterator[Emfit]: yield from iter_datas_cached(path) diff --git a/my/fbmessenger.py b/my/fbmessenger.py index a447721..84c72de 100644 --- a/my/fbmessenger.py +++ b/my/fbmessenger.py @@ -8,12 +8,12 @@ from typing import Iterator from .common import PathIsh -import mycfg.repos.fbmessengerexport.dal as messenger -from mycfg import paths +import my.config.repos.fbmessengerexport.dal as messenger +from my.config import fbmessenger as config def _dal() -> messenger.DAL: - return messenger.DAL(paths.fbmessenger.export_db) + return messenger.DAL(config.export_db) # TODO Result type? diff --git a/my/feedbin.py b/my/feedbin.py index eab3f33..a31a53a 100644 --- a/my/feedbin.py +++ b/my/feedbin.py @@ -5,7 +5,7 @@ Module for Feedbin RSS reader from .common import listify from ._rss import Subscription -from mycfg import paths +from my.config import feedbin as config import json from pathlib import Path @@ -27,7 +27,8 @@ def parse_file(f: Path): def get_states() -> Dict[datetime, List[Subscription]]: res = {} - for f in sorted(Path(paths.feedbin.export_dir).glob('*.json')): + # TODO use get_files + for f in sorted(Path(config.export_dir).glob('*.json')): dts = f.stem.split('_')[-1] dt = isoparse(dts) subs = parse_file(f) diff --git a/my/feedly.py b/my/feedly.py index 528ce0d..ca0301c 100644 --- a/my/feedly.py +++ b/my/feedly.py @@ -5,7 +5,7 @@ Module for Fedly RSS reader from .common import listify from ._rss import Subscription -from mycfg import paths +from my.config import feedly as config import json from pathlib import Path @@ -30,7 +30,8 @@ def parse_file(f: Path): def get_states() -> Dict[datetime, List[Subscription]]: res = {} - for f in 
sorted(Path(paths.feedly.export_dir).glob('*.json')): + # TODO use get_files + for f in sorted(Path(config.export_dir).glob('*.json')): dts = f.stem.split('_')[-1] dt = datetime.strptime(dts, '%Y%m%d%H%M%S') dt = pytz.utc.localize(dt) diff --git a/my/foursquare.py b/my/foursquare.py index fd913b3..03cc312 100755 --- a/my/foursquare.py +++ b/my/foursquare.py @@ -1,4 +1,7 @@ -#!/usr/bin/env python3 +''' +Foursquare/Swarm checkins +''' + from datetime import datetime, timezone, timedelta from itertools import chain from pathlib import Path @@ -9,14 +12,14 @@ from pathlib import Path # TODO pytz for timezone??? from .common import get_files, LazyLogger +from my.config import foursquare as config -logger = LazyLogger('my.foursquare') +logger = LazyLogger(__package__) def _get_exports() -> List[Path]: - from mycfg import paths - return get_files(paths.foursquare.export_path, '*.json') + return get_files(config.export_path, '*.json') class Checkin: diff --git a/my/instapaper.py b/my/instapaper.py index 224906a..aa70527 100644 --- a/my/instapaper.py +++ b/my/instapaper.py @@ -7,12 +7,12 @@ from typing import NamedTuple, Optional, List, Iterator from .common import group_by_key, PathIsh, get_files -from mycfg import paths -import mycfg.repos.instapexport.dal as dal +from my.config import instapaper as config +import my.config.repos.instapexport.dal as dal def _get_files(): - return get_files(paths.instapaper.export_path, glob='*.json') + return get_files(config.export_path, glob='*.json') def get_dal() -> dal.DAL: diff --git a/my/jawbone/__init__.py b/my/jawbone/__init__.py index 4fd4080..0879b20 100755 --- a/my/jawbone/__init__.py +++ b/my/jawbone/__init__.py @@ -7,10 +7,10 @@ from pathlib import Path import logging import pytz -from mycfg import paths +from my.config import jawbone as config -BDIR = paths.jawbone.export_dir +BDIR = config.export_dir PHASES_FILE = BDIR / 'phases.json' SLEEPS_FILE = BDIR / 'sleeps.json' GRAPHS_DIR = BDIR / 'graphs' diff --git 
a/my/jawbone/plots.py b/my/jawbone/plots.py index a48b2c6..ae44a8c 100755 --- a/my/jawbone/plots.py +++ b/my/jawbone/plots.py @@ -87,9 +87,9 @@ def iter_useful(data_file: str): # TODO <<< hmm. these files do contain deep and light sleep?? # also steps stats?? -from mycfg import paths +from my.config import jawbone as config -p = paths.jawbone.export_dir / 'old_csv' +p = config.export_dir / 'old_csv' # TODO with_my? files = [ p / "2015.csv", diff --git a/my/lastfm/__init__.py b/my/lastfm/__init__.py index 66f05ce..12239b0 100755 --- a/my/lastfm/__init__.py +++ b/my/lastfm/__init__.py @@ -1,4 +1,9 @@ -#!/usr/bin/env python3 +''' +Last.fm scrobbles +''' + +from .. import init + from functools import lru_cache from typing import NamedTuple, Dict, Any from datetime import datetime @@ -7,7 +12,7 @@ import json import pytz -from mycfg import paths +from my.config import lastfm as config # TODO Json type? # TODO memoised properties? @@ -43,7 +48,8 @@ class Scrobble(NamedTuple): # TODO memoise...? # TODO watch out, if we keep the app running it might expire def _iter_scrobbles(): - last = max(Path(paths.lastfm.export_path).glob('*.json')) + # TODO use get_files + last = max(Path(config.export_path).glob('*.json')) # TODO mm, no timezone? 
hopefuly it's UTC j = json.loads(last.read_text()) diff --git a/my/location/takeout.py b/my/location/takeout.py index 768a51e..3aec270 100644 --- a/my/location/takeout.py +++ b/my/location/takeout.py @@ -31,8 +31,8 @@ logger = LazyLogger(__package__) def cache_path(*args, **kwargs): - from mycfg import paths - return paths.location.cache_path + from my.config import location as config + return config.cache_path Tag = Optional[str] @@ -51,9 +51,9 @@ def _iter_locations_fo(fo, start, stop) -> Iterator[Location]: errors = 0 try: - from mycfg.locations import LOCATIONS as known_locations + from my.config.locations import LOCATIONS as known_locations except ModuleNotFoundError as e: - name = 'mycfg.locations' + name = 'my.config.locations' if e.name != name: raise e logger.warning("'%s' isn't found. setting known_locations to empty list", name) @@ -118,7 +118,6 @@ def _iter_locations(path: Path, start=0, stop=None) -> Iterator[Location]: def iter_locations(**kwargs) -> Iterator[Location]: - from mycfg import paths # TODO need to include older data last_takeout = get_last_takeout(path=_LOCATION_JSON) diff --git a/my/materialistic.py b/my/materialistic.py index 2f822e4..79cd448 100644 --- a/my/materialistic.py +++ b/my/materialistic.py @@ -1,6 +1,7 @@ """ -Module for [[https://play.google.com/store/apps/details?id=io.github.hidroh.materialistic][Materialistic]] app for Hackernews +[[https://play.google.com/store/apps/details?id=io.github.hidroh.materialistic][Materialistic]] app for Hackernews """ +from . 
import init from datetime import datetime from typing import Any, Dict, Iterator, NamedTuple @@ -9,7 +10,7 @@ import pytz import dataset # type: ignore from .common import get_files -from mycfg import paths +from my.config import materialistic as config Row = Dict[str, Any] @@ -41,7 +42,7 @@ class Saved(NamedTuple): def _last_export(): - return max(get_files(paths.materialistic.export_path, glob='**/*.db')) + return max(get_files(config.export_path, glob='**/*.db')) def raw() -> Iterator[Row]: diff --git a/my/media/imdb.py b/my/media/imdb.py index c16ef64..42a1bc0 100644 --- a/my/media/imdb.py +++ b/my/media/imdb.py @@ -1,4 +1,7 @@ #!/usr/bin/env python3 + +from .. import init + import csv import json from datetime import datetime @@ -6,10 +9,10 @@ from typing import Iterator, List, NamedTuple from ..common import get_files -from mycfg import paths +from my.config import imdb as config def _get_last(): - return max(get_files(paths.imdb.export_path, glob='*.csv')) + return max(get_files(config.export_path, glob='*.csv')) class Movie(NamedTuple): diff --git a/my/mycfg_stub/__init__.py b/my/mycfg_stub/__init__.py index b523526..a1438fc 100644 --- a/my/mycfg_stub/__init__.py +++ b/my/mycfg_stub/__init__.py @@ -1,4 +1,4 @@ # TODO maybe, reuse mycfg_template here? -class paths: - pass +class stub: + key = 'value' diff --git a/my/notes/orgmode.py b/my/notes/orgmode.py index 89c0eb5..6df688c 100644 --- a/my/notes/orgmode.py +++ b/my/notes/orgmode.py @@ -8,7 +8,7 @@ from pathlib import Path from ..common import PathIsh -from mycfg import orgmode as config +from my.config import orgmode as config from porg import Org diff --git a/my/pdfs.py b/my/pdfs.py index 567304f..ba529e2 100755 --- a/my/pdfs.py +++ b/my/pdfs.py @@ -1,4 +1,10 @@ #!/usr/bin/env python3 +''' +PDF annotations on your filesystem +''' + +from . 
import init + from concurrent.futures import ProcessPoolExecutor from datetime import datetime import re @@ -14,8 +20,8 @@ from .common import mcachew, group_by_key from .error import Res, split_errors # path to pdfannots (https://github.com/0xabu/pdfannots) -import mycfg.repos.pdfannots.pdfannots as pdfannots -from mycfg import paths +import my.config.repos.pdfannots.pdfannots as pdfannots +from my.config import pdfs as config def get_logger(): @@ -24,12 +30,12 @@ def get_logger(): def is_ignored(p: Path) -> bool: # ignore some extremely heavy files - return paths.pdfs.is_ignored(p) + return config.is_ignored(p) def candidates(roots=None) -> Iterator[Path]: if roots is None: - roots = paths.pdfs.roots + roots = config.roots for r in roots: for p in Path(r).rglob('*.pdf'): diff --git a/my/photos/__init__.py b/my/photos/__init__.py index 3444692..905e0a1 100644 --- a/my/photos/__init__.py +++ b/my/photos/__init__.py @@ -14,7 +14,7 @@ from geopy.geocoders import Nominatim # type: ignore from ..common import LazyLogger, mcachew, fastermime from ..error import Res -from mycfg import photos as config +from my.config import photos as config log = LazyLogger('my.photos') diff --git a/my/pinboard.py b/my/pinboard.py index 60a8788..68042a3 100644 --- a/my/pinboard.py +++ b/my/pinboard.py @@ -1,11 +1,12 @@ """ -Module for pinboard.in bookmarks +pinboard.in bookmarks """ +from . import init from .common import get_files -from mycfg.repos.pinbexport import dal as pinbexport -from mycfg import paths +from my.config.repos.pinbexport import dal as pinbexport +from my.config import pinboard as config # TODO would be nice to make interfaces available for mypy... Bookmark = pinbexport.Bookmark @@ -13,7 +14,7 @@ Bookmark = pinbexport.Bookmark # yep; clearly looks that the purpose of my. package is to wire files to DAL implicitly; otherwise it's just passtrhough. 
def dal(): - sources = get_files(paths.pinbexport.export_dir, glob='*.json') + sources = get_files(config.export_dir, glob='*.json') model = pinbexport.DAL(sources) return model diff --git a/my/pocket.py b/my/pocket.py index ef05d30..137ddeb 100644 --- a/my/pocket.py +++ b/my/pocket.py @@ -4,7 +4,7 @@ from typing import NamedTuple, Sequence, Any from .common import get_files -from mycfg import pocket as config +from my.config import pocket as config def _files(): diff --git a/my/reading/goodreads.py b/my/reading/goodreads.py index 47a67f7..1b06c26 100755 --- a/my/reading/goodreads.py +++ b/my/reading/goodreads.py @@ -4,12 +4,12 @@ from typing import NamedTuple from datetime import datetime import pytz -from mycfg.repos.goodrexport import dal as goodrexport -from mycfg import paths +from my.config.repos.goodrexport import dal as goodrexport +from my.config import goodreads as config def get_model(): - sources = list(sorted(paths.goodrexport.export_dir.glob('*.xml'))) + sources = list(sorted(config.export_dir.glob('*.xml'))) model = goodrexport.DAL(sources) return model diff --git a/my/reddit.py b/my/reddit.py index e2b78c2..9491bd6 100755 --- a/my/reddit.py +++ b/my/reddit.py @@ -1,7 +1,7 @@ -#!/usr/bin/env python3 """ Module for Reddit data: saved items/comments/upvotes etc """ +from . import init from pathlib import Path from typing import List, Sequence, Mapping, Iterator @@ -9,19 +9,19 @@ from typing import List, Sequence, Mapping, Iterator from .kython.kompress import CPath from .common import mcachew, get_files, LazyLogger, make_dict -from mycfg import paths -import mycfg.repos.rexport.dal as rexport +from my.config import reddit as config +import my.config.repos.rexport.dal as rexport def get_sources() -> Sequence[Path]: # TODO use zstd? # TODO maybe add assert to get_files? 
(and allow to suppress it) - files = get_files(paths.rexport.export_dir, glob='*.json.xz') + files = get_files(config.export_dir, glob='*.json.xz') res = list(map(CPath, files)); assert len(res) > 0 return tuple(res) -logger = LazyLogger('my.reddit', level='debug') +logger = LazyLogger(__package__, level='debug') Sid = rexport.Sid diff --git a/my/rescuetime.py b/my/rescuetime.py index 620688b..3ee2730 100644 --- a/my/rescuetime.py +++ b/my/rescuetime.py @@ -1,3 +1,7 @@ +''' +Rescuetime (activity tracking) data +''' + from pathlib import Path from datetime import datetime, timedelta from typing import NamedTuple, Dict, List, Set, Optional @@ -8,18 +12,17 @@ from .error import Res, split_errors # TODO get rid of it from kython import group_by_cmp # type: ignore -from mycfg import paths +from my.config import rescuetime as config -log = LazyLogger('my.rescuetime', level='info') +log = LazyLogger(__package__, level='info') def _get_exports() -> List[Path]: - from mycfg import paths - return get_files(paths.rescuetime.export_path, '*.json') + return get_files(config.export_path, '*.json') -import mycfg.repos.rescuexport.model as rescuexport +import my.config.repos.rescuexport.model as rescuexport Model = rescuexport.Model diff --git a/my/rtm.py b/my/rtm.py index fe41ab0..55ed7a0 100755 --- a/my/rtm.py +++ b/my/rtm.py @@ -11,7 +11,7 @@ from datetime import datetime from .common import LazyLogger, get_files, group_by_key, cproperty, make_dict from .kython.kompress import open as kopen -from mycfg import rtm as config +from my.config import rtm as config import icalendar # type: ignore diff --git a/my/smscalls.py b/my/smscalls.py index 60e1b4e..e2d80f1 100644 --- a/my/smscalls.py +++ b/my/smscalls.py @@ -2,6 +2,7 @@ Phone calls and SMS messages """ # TODO extract SMS as well? I barely use them though.. +from . 
import init from datetime import datetime from pathlib import Path @@ -12,7 +13,7 @@ from lxml import etree # type: ignore from .common import get_files -from mycfg import paths +from my.config import smscalls as config class Call(NamedTuple): @@ -40,7 +41,7 @@ def _extract_calls(path: Path) -> Iterator[Call]: def calls() -> Iterator[Call]: - files = get_files(paths.smscalls.export_path, glob='calls-*.xml') + files = get_files(config.export_path, glob='calls-*.xml') # TODO always replacing with the latter is good, we get better contact names?? emitted: Set[datetime] = set() diff --git a/my/stackexchange.py b/my/stackexchange.py index 267c208..314546a 100644 --- a/my/stackexchange.py +++ b/my/stackexchange.py @@ -1,7 +1,11 @@ -import mycfg.repos.stexport.model as stexport -from mycfg import paths +''' +Stackexchange data +''' + +import my.config.repos.stexport.model as stexport +from my.config import stackexchange as config def get_data(): - sources = [max(paths.stexport.export_dir.glob('*.json'))] + sources = [max(config.export_dir.glob('*.json'))] return stexport.Model(sources).site_model('stackoverflow') diff --git a/my/takeout.py b/my/takeout.py index 53129fc..64dbcda 100644 --- a/my/takeout.py +++ b/my/takeout.py @@ -3,7 +3,7 @@ from typing import Optional from .common import get_files -from mycfg import paths +from my.config import google as config from .kython.kompress import kopen @@ -12,7 +12,7 @@ def get_last_takeout(*, path: Optional[str]=None) -> Path: Ok, sometimes google splits takeout into two zip archives I guess I could detect it (they've got 001/002 etc suffixes), but fornow that works fine.. 
""" - for takeout in reversed(get_files(paths.google.takeout_path, glob='*.zip')): + for takeout in reversed(get_files(config.takeout_path, glob='*.zip')): if path is None: return takeout else: diff --git a/my/twitter.py b/my/twitter.py index 22addc2..55ed0a7 100755 --- a/my/twitter.py +++ b/my/twitter.py @@ -4,12 +4,7 @@ Module for Twitter (uses official twitter archive export) See https://help.twitter.com/en/managing-your-account/how-to-download-your-twitter-archive """ -# TODO remove these -""" -Expects path to be set -- via ~configure~ (before calling anything else) -- or in ~mycfg.twitter.export_path~ -""" +from . import init from datetime import date, datetime @@ -24,9 +19,10 @@ from .common import PathIsh, get_files, LazyLogger from .kython import kompress -logger = LazyLogger('my.twitter') +logger = LazyLogger(__package__) +# TODO get rid of this? _export_path: Optional[Path] = None def configure(*, export_path: Optional[PathIsh]=None) -> None: if export_path is not None: @@ -37,9 +33,9 @@ def configure(*, export_path: Optional[PathIsh]=None) -> None: def _get_export() -> Path: export_path = _export_path if export_path is None: - # fallback to mycfg - from mycfg import paths - export_path = paths.twitter.export_path + # fallback + from my.config import twitter as config + export_path = config.export_path return max(get_files(export_path, '*.zip')) diff --git a/my/vk.py b/my/vk.py index 7c7c0d6..feb3655 100644 --- a/my/vk.py +++ b/my/vk.py @@ -2,7 +2,7 @@ from datetime import datetime import json from typing import NamedTuple, Iterator, Dict, Union, Sequence, Optional -from mycfg import paths +from my.config import vk as config class Favorite(NamedTuple): @@ -53,7 +53,7 @@ def parse_fav(j: Dict) -> Favorite: def _iter_favs() -> Iterator[Res]: - jj = json.loads(paths.vk.favs_file.read_text()) + jj = json.loads(config.favs_file.read_text()) for j in jj: try: yield parse_fav(j) diff --git a/tox.ini b/tox.ini index 777b4c6..1962871 100644 --- a/tox.ini +++ 
b/tox.ini @@ -6,12 +6,13 @@ skip_missing_interpreters = True # TODO ugh. unclear how to reuse setup.cfg deps in tox [testenv] passenv = CI CI_* CIRCLE* -setenv = MY_CONFIG = mycfg_template +# deliberately set to nonexistent path to check the fallback logic +setenv = MY_CONFIG = nonexistent commands = pip install -e . # TODO ?? # python -m pytest {posargs} - python3 -c 'import my.init; import my.config; print(my.config.__path__)' + python3 -c 'import my.init; from my.config import stub as config; print(config.key)' python3 -c 'import my.init; import my.config; import my.config.repos' # shouldn't fail at least # TODO run demo.py? just make sure with_my is a bit cleverer? # TODO e.g. under CI, rely on installing @@ -22,7 +23,7 @@ commands = changedir = {toxworkdir}/{envname}/../.. # TODO not sure?? also reuse installed my. instead on installing in demo.py? skip_install = true -# TODO need to keep full mycfg.py as example? to dummy run CI +# TODO need to keep full config as example? to dummy run CI commands = ./demo.py