use my.config instead of mycfg; minor cleanups and docstrings
Parent: f31ff71e6f
Commit: e5b3a1e91e
37 changed files with 142 additions and 110 deletions
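
In short: every module stops importing the ad-hoc `mycfg` package and instead imports its own section from `my.config`. A minimal sketch of the before/after access pattern, with names taken from the bluemaestro hunk below (`get_files` comes from the package's common helpers, as in the diffs):

    # before: one global config package, addressed by its full dotted path
    import mycfg
    dbs = get_files(mycfg.bluemaestro.export_path, glob='*.db')

    # after: each module imports just its own config section
    from my.config import bluemaestro as config
    dbs = get_files(config.export_path, glob='*.db')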
@@ -14,14 +14,14 @@ from typing import Any, Dict, Iterable, NamedTuple, Set
 from ..common import mcachew, LazyLogger, get_files
 
 
-import mycfg
+from my.config import bluemaestro as config
 
 
 logger = LazyLogger('bluemaestro', level='debug')
 
 
 def _get_exports():
-    return get_files(mycfg.bluemaestro.export_path, glob='*.db')
+    return get_files(config.export_path, glob='*.db')
 
 
 class Measurement(NamedTuple):
@@ -29,7 +29,7 @@ class Measurement(NamedTuple):
     temp: float
 
 
-@mcachew(cache_path=mycfg.bluemaestro.cache_path)
+@mcachew(cache_path=config.cache_path)
 def _iter_measurements(dbs) -> Iterable[Measurement]:
     # I guess we can affort keeping them in sorted order
     points: Set[Measurement] = set()

@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 """
 Blood tracking
 """
@@ -14,7 +13,7 @@ from ..error import Res, echain
 
 from kython.org import parse_org_date
 
-from mycfg import paths
+from my.config import blood as config
 
 import pandas as pd # type: ignore
 
@@ -52,7 +51,7 @@ def try_float(s: str) -> Optional[float]:
 
 
 def iter_gluc_keto_data() -> Iterable[Result]:
-    o = porg.Org.from_file(str(paths.blood.blood_log))
+    o = porg.Org.from_file(str(config.blood_log))
     tbl = o.xpath('//table')
     for l in tbl.lines:
         kets = l['ket'].strip()
@@ -71,7 +70,7 @@ def iter_gluc_keto_data() -> Iterable[Result]:
 
 
 def iter_tests_data() -> Iterable[Result]:
-    o = porg.Org.from_file(str(paths.blood.blood_tests_log))
+    o = porg.Org.from_file(str(config.blood_tests_log))
     tbl = o.xpath('//table')
     for d in tbl.lines:
         try:

@@ -9,7 +9,7 @@ from ..common import LazyLogger
 from ..error import Res
 from ..notes import orgmode
 
-from mycfg import weight as config
+from my.config import weight as config
 
 
 log = LazyLogger('my.body.weight')

@@ -4,12 +4,12 @@ Kobo e-ink reader: annotations and reading stats
 from typing import Callable, Union, List
 
-from mycfg import paths
-from mycfg.repos.kobuddy.src.kobuddy import *
+from my.config import kobo as config
+from my.config.repos.kobuddy.src.kobuddy import *
 # hmm, explicit imports make pylint a bit happier..
-from mycfg.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights
+from my.config.repos.kobuddy.src.kobuddy import Highlight, set_databases, get_highlights
 
-set_databases(paths.kobuddy.export_dir)
+set_databases(config.export_dir)
 
 # TODO maybe type over T?
 _Predicate = Callable[[str], bool]

@@ -8,7 +8,7 @@ import re
 from typing import Tuple, Iterator, List, Union
 
 
-from mycfg.holidays_data import HOLIDAYS_DATA
+from my.config.holidays_data import HOLIDAYS_DATA
 
 
 # pip3 install workalendar

@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+from .. import init
+
+from my.config import codeforces as config
+
 from datetime import datetime
 from typing import NamedTuple
 from pathlib import Path
@@ -31,8 +35,7 @@ Cmap = Dict[Cid, Contest]
 
 
 def get_contests() -> Cmap:
-    from mycfg import paths
-    last = max(get_files(paths.codeforces.export_path, 'allcontests*.json'))
+    last = max(get_files(config.export_path, 'allcontests*.json'))
     j = json.loads(last.read_text())
     d = {}
     for c in j['result']:
@@ -77,8 +80,7 @@ class Competition(NamedTuple):
 
 def iter_data() -> Iterator[Res[Competition]]:
     cmap = get_contests()
-    from mycfg import paths
-    last = max(get_files(paths.codeforces.export_path, 'codeforces*.json'))
+    last = max(get_files(config.export_path, 'codeforces*.json'))
 
     with wrap(json.loads(last.read_text())) as j:
         j['status'].ignore()

@@ -7,7 +7,7 @@ from datetime import datetime, timezone
 from typing import List, NamedTuple, Optional, Dict, Any, Iterator, Set
 
 from ..common import PathIsh, LazyLogger, mcachew
-from mycfg import commits as config
+from my.config import commits as config
 
 # pip3 install gitpython
 import git # type: ignore

@@ -2,6 +2,8 @@
 Github events and their metadata: comments/issues/pull requests
 """
 
+from .. import init
+
 from typing import Dict, List, Union, Any, NamedTuple, Tuple, Optional, Iterator, TypeVar, Set
 from datetime import datetime
 import json
@@ -14,8 +16,8 @@ from ..kython.kompress import CPath
 from ..common import get_files, mcachew
 from ..error import Res
 
-from mycfg import paths
-import mycfg.repos.ghexport.dal as ghexport
+from my.config import github as config
+import my.config.repos.ghexport.dal as ghexport
 
 
 logger = LazyLogger('my.github')
@@ -79,7 +81,7 @@ def _get_summary(e) -> Tuple[str, Optional[str], Optional[str]]:
 
 
 def get_dal():
-    sources = get_files(paths.github.export_dir, glob='*.json*')
+    sources = get_files(config.export_dir, glob='*.json*')
     sources = list(map(CPath, sources)) # TODO maybe move it to get_files? e.g. compressed=True arg?
     return ghexport.DAL(sources)
 
@@ -178,7 +180,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]:
     """
     Parses events from GDPR export (https://github.com/settings/admin)
     """
-    files = list(sorted(paths.github.gdpr_dir.glob('*.json')))
+    files = list(sorted(config.gdpr_dir.glob('*.json')))
     handler_map = {
         'schema' : None,
         'issue_events_': None, # eh, doesn't seem to have any useful bodies
@@ -215,7 +217,7 @@ def iter_gdpr_events() -> Iterator[Res[Event]]:
 
 
 # TODO hmm. not good, need to be lazier?...
-@mcachew(paths.github.cache_dir, hashf=lambda dal: dal.sources)
+@mcachew(config.cache_dir, hashf=lambda dal: dal.sources)
 def iter_backup_events(dal=get_dal()) -> Iterator[Event]:
     for d in dal.events():
         yield _parse_event(d)

@@ -1,4 +1,8 @@
 #!/usr/bin/env python3
+from .. import init
+
+from my.config import topcoder as config
+
 from datetime import datetime
 from typing import NamedTuple
 from pathlib import Path
@@ -15,8 +19,7 @@ from kython.konsume import zoom, wrap, ignore
 
 # TODO json type??
 def _get_latest() -> Dict:
-    from mycfg import paths
-    pp = max(get_files(paths.topcoder.export_path, glob='*.json'))
+    pp = max(get_files(config.export_path, glob='*.json'))
     return json.loads(pp.read_text())
 
 

@@ -16,7 +16,7 @@ import pytz
 
 from ..common import get_files, LazyLogger, cproperty, group_by_key, mcachew
 
-import mycfg
+from my.config import emfit as config
 
 
 logger = LazyLogger('my.emfit', level='info')
@@ -36,7 +36,7 @@ AWAKE = 4
 Sid = str
 
 # TODO use tz provider for that?
-_TZ = pytz.timezone(mycfg.emfit.tz)
+_TZ = pytz.timezone(config.tz)
 
 # TODO use common tz thing?
 def fromts(ts) -> datetime:
@@ -299,19 +299,19 @@ def dir_hash(path: Path):
     return mtimes
 
 
-@mcachew(cache_path=mycfg.emfit.cache_path, hashf=dir_hash, logger=logger)
+@mcachew(cache_path=config.cache_path, hashf=dir_hash, logger=logger)
 def iter_datas_cached(path: Path) -> Iterator[Emfit]:
     # TODO use get_files?
     for f in sorted(path.glob('*.json')):
         sid = f.stem
-        if sid in mycfg.emfit.excluded_sids:
+        if sid in config.excluded_sids:
             continue
 
         em = EmfitOld(sid=sid, jj=json.loads(f.read_text()))
         yield from Emfit.make(em)
 
 
-def iter_datas(path=mycfg.emfit.export_path) -> Iterator[Emfit]:
+def iter_datas(path=config.export_path) -> Iterator[Emfit]:
     yield from iter_datas_cached(path)
 
 

@@ -8,12 +8,12 @@ from typing import Iterator
 
 from .common import PathIsh
 
-import mycfg.repos.fbmessengerexport.dal as messenger
-from mycfg import paths
+import my.config.repos.fbmessengerexport.dal as messenger
+from my.config import fbmessenger as config
 
 
 def _dal() -> messenger.DAL:
-    return messenger.DAL(paths.fbmessenger.export_db)
+    return messenger.DAL(config.export_db)
 
 
 # TODO Result type?

@@ -5,7 +5,7 @@ Module for Feedbin RSS reader
 from .common import listify
 from ._rss import Subscription
 
-from mycfg import paths
+from my.config import feedbin as config
 
 import json
 from pathlib import Path
@@ -27,7 +27,8 @@ def parse_file(f: Path):
 
 def get_states() -> Dict[datetime, List[Subscription]]:
     res = {}
-    for f in sorted(Path(paths.feedbin.export_dir).glob('*.json')):
+    # TODO use get_files
+    for f in sorted(Path(config.export_dir).glob('*.json')):
         dts = f.stem.split('_')[-1]
         dt = isoparse(dts)
         subs = parse_file(f)

@@ -5,7 +5,7 @@ Module for Fedly RSS reader
 from .common import listify
 from ._rss import Subscription
 
-from mycfg import paths
+from my.config import feedly as config
 
 import json
 from pathlib import Path
@@ -30,7 +30,8 @@ def parse_file(f: Path):
 
 def get_states() -> Dict[datetime, List[Subscription]]:
     res = {}
-    for f in sorted(Path(paths.feedly.export_dir).glob('*.json')):
+    # TODO use get_files
+    for f in sorted(Path(config.export_dir).glob('*.json')):
         dts = f.stem.split('_')[-1]
         dt = datetime.strptime(dts, '%Y%m%d%H%M%S')
         dt = pytz.utc.localize(dt)

@@ -1,4 +1,7 @@
 #!/usr/bin/env python3
+'''
+Foursquare/Swarm checkins
+'''
 from datetime import datetime, timezone, timedelta
 from itertools import chain
 from pathlib import Path
@@ -9,14 +12,14 @@ from pathlib import Path
 # TODO pytz for timezone???
 
 from .common import get_files, LazyLogger
+from my.config import foursquare as config
 
 
-logger = LazyLogger('my.foursquare')
+logger = LazyLogger(__package__)
 
 
 def _get_exports() -> List[Path]:
-    from mycfg import paths
-    return get_files(paths.foursquare.export_path, '*.json')
+    return get_files(config.export_path, '*.json')
 
 
 class Checkin:

@@ -7,12 +7,12 @@ from typing import NamedTuple, Optional, List, Iterator
 from .common import group_by_key, PathIsh, get_files
 
 
-from mycfg import paths
-import mycfg.repos.instapexport.dal as dal
+from my.config import instapaper as config
+import my.config.repos.instapexport.dal as dal
 
 
 def _get_files():
-    return get_files(paths.instapaper.export_path, glob='*.json')
+    return get_files(config.export_path, glob='*.json')
 
 
 def get_dal() -> dal.DAL:

@@ -7,10 +7,10 @@ from pathlib import Path
 import logging
 import pytz
 
-from mycfg import paths
+from my.config import jawbone as config
 
 
-BDIR = paths.jawbone.export_dir
+BDIR = config.export_dir
 PHASES_FILE = BDIR / 'phases.json'
 SLEEPS_FILE = BDIR / 'sleeps.json'
 GRAPHS_DIR = BDIR / 'graphs'
@@ -87,9 +87,9 @@ def iter_useful(data_file: str):
 
 # TODO <<< hmm. these files do contain deep and light sleep??
 # also steps stats??
-from mycfg import paths
+from my.config import jawbone as config
 
-p = paths.jawbone.export_dir / 'old_csv'
+p = config.export_dir / 'old_csv'
 # TODO with_my?
 files = [
     p / "2015.csv",

@@ -1,4 +1,9 @@
 #!/usr/bin/env python3
+'''
+Last.fm scrobbles
+'''
+
+from .. import init
 from functools import lru_cache
 from typing import NamedTuple, Dict, Any
 from datetime import datetime
@@ -7,7 +12,7 @@ import json
 
 import pytz
 
-from mycfg import paths
+from my.config import lastfm as config
 
 # TODO Json type?
 # TODO memoised properties?
@@ -43,7 +48,8 @@ class Scrobble(NamedTuple):
 # TODO memoise...?
 # TODO watch out, if we keep the app running it might expire
 def _iter_scrobbles():
-    last = max(Path(paths.lastfm.export_path).glob('*.json'))
+    # TODO use get_files
+    last = max(Path(config.export_path).glob('*.json'))
     # TODO mm, no timezone? hopefuly it's UTC
     j = json.loads(last.read_text())
 

@@ -31,8 +31,8 @@ logger = LazyLogger(__package__)
 
 
 def cache_path(*args, **kwargs):
-    from mycfg import paths
-    return paths.location.cache_path
+    from my.config import location as config
+    return config.cache_path
 
 
 Tag = Optional[str]
@@ -51,9 +51,9 @@ def _iter_locations_fo(fo, start, stop) -> Iterator[Location]:
     errors = 0
 
     try:
-        from mycfg.locations import LOCATIONS as known_locations
+        from my.config.locations import LOCATIONS as known_locations
     except ModuleNotFoundError as e:
-        name = 'mycfg.locations'
+        name = 'my.config.locations'
         if e.name != name:
             raise e
         logger.warning("'%s' isn't found. setting known_locations to empty list", name)
@@ -118,7 +118,6 @@ def _iter_locations(path: Path, start=0, stop=None) -> Iterator[Location]:
 
 
 def iter_locations(**kwargs) -> Iterator[Location]:
-    from mycfg import paths
     # TODO need to include older data
     last_takeout = get_last_takeout(path=_LOCATION_JSON)
 

@@ -1,6 +1,7 @@
 """
-Module for [[https://play.google.com/store/apps/details?id=io.github.hidroh.materialistic][Materialistic]] app for Hackernews
+[[https://play.google.com/store/apps/details?id=io.github.hidroh.materialistic][Materialistic]] app for Hackernews
 """
+from . import init
 
 from datetime import datetime
 from typing import Any, Dict, Iterator, NamedTuple
@@ -9,7 +10,7 @@ import pytz
 import dataset # type: ignore
 
 from .common import get_files
-from mycfg import paths
+from my.config import materialistic as config
 
 
 Row = Dict[str, Any]
@@ -41,7 +42,7 @@ class Saved(NamedTuple):
 
 
 def _last_export():
-    return max(get_files(paths.materialistic.export_path, glob='**/*.db'))
+    return max(get_files(config.export_path, glob='**/*.db'))
 
 
 def raw() -> Iterator[Row]:

@@ -1,4 +1,7 @@
 #!/usr/bin/env python3
+
+from .. import init
+
 import csv
 import json
 from datetime import datetime
@@ -6,10 +9,10 @@ from typing import Iterator, List, NamedTuple
 
 from ..common import get_files
 
-from mycfg import paths
+from my.config import imdb as config
 
 def _get_last():
-    return max(get_files(paths.imdb.export_path, glob='*.csv'))
+    return max(get_files(config.export_path, glob='*.csv'))
 
 
 class Movie(NamedTuple):

@@ -1,4 +1,4 @@
 # TODO maybe, reuse mycfg_template here?
 
-class paths:
-    pass
+class stub:
+    key = 'value'

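The new `stub` class gives the fallback config package a guaranteed attribute to import; the tox environment at the end of this diff smoke-tests exactly that (command reproduced from the tox.ini hunk below):

    python3 -c 'import my.init; from my.config import stub as config; print(config.key)'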
@@ -8,7 +8,7 @@ from pathlib import Path
 
 from ..common import PathIsh
 
-from mycfg import orgmode as config
+from my.config import orgmode as config
 
 from porg import Org
 

my/pdfs.py (14 changes)
@@ -1,4 +1,10 @@
 #!/usr/bin/env python3
+'''
+PDF annotations on your filesystem
+'''
+
+from . import init
+
 from concurrent.futures import ProcessPoolExecutor
 from datetime import datetime
 import re
@@ -14,8 +20,8 @@ from .common import mcachew, group_by_key
 from .error import Res, split_errors
 
 # path to pdfannots (https://github.com/0xabu/pdfannots)
-import mycfg.repos.pdfannots.pdfannots as pdfannots
-from mycfg import paths
+import my.config.repos.pdfannots.pdfannots as pdfannots
+from my.config import pdfs as config
 
 
 def get_logger():
@@ -24,12 +30,12 @@ def get_logger():
 
 def is_ignored(p: Path) -> bool:
     # ignore some extremely heavy files
-    return paths.pdfs.is_ignored(p)
+    return config.is_ignored(p)
 
 
 def candidates(roots=None) -> Iterator[Path]:
     if roots is None:
-        roots = paths.pdfs.roots
+        roots = config.roots
 
     for r in roots:
         for p in Path(r).rglob('*.pdf'):

@@ -14,7 +14,7 @@ from geopy.geocoders import Nominatim # type: ignore
 from ..common import LazyLogger, mcachew, fastermime
 from ..error import Res
 
-from mycfg import photos as config
+from my.config import photos as config
 
 
 log = LazyLogger('my.photos')

@@ -1,11 +1,12 @@
 """
-Module for pinboard.in bookmarks
+pinboard.in bookmarks
 """
+from . import init
 
 from .common import get_files
 
-from mycfg.repos.pinbexport import dal as pinbexport
-from mycfg import paths
+from my.config.repos.pinbexport import dal as pinbexport
+from my.config import pinboard as config
 
 # TODO would be nice to make interfaces available for mypy...
 Bookmark = pinbexport.Bookmark
@@ -13,7 +14,7 @@ Bookmark = pinbexport.Bookmark
 
 # yep; clearly looks that the purpose of my. package is to wire files to DAL implicitly; otherwise it's just passtrhough.
 def dal():
-    sources = get_files(paths.pinbexport.export_dir, glob='*.json')
+    sources = get_files(config.export_dir, glob='*.json')
     model = pinbexport.DAL(sources)
     return model
 

@@ -4,7 +4,7 @@ from typing import NamedTuple, Sequence, Any
 
 from .common import get_files
 
-from mycfg import pocket as config
+from my.config import pocket as config
 
 
 def _files():

@@ -4,12 +4,12 @@ from typing import NamedTuple
 from datetime import datetime
 import pytz
 
-from mycfg.repos.goodrexport import dal as goodrexport
-from mycfg import paths
+from my.config.repos.goodrexport import dal as goodrexport
+from my.config import goodreads as config
 
 
 def get_model():
-    sources = list(sorted(paths.goodrexport.export_dir.glob('*.xml')))
+    sources = list(sorted(config.export_dir.glob('*.xml')))
     model = goodrexport.DAL(sources)
     return model
 

my/reddit.py (10 changes)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 """
 Module for Reddit data: saved items/comments/upvotes etc
 """
+from . import init
 
 from pathlib import Path
 from typing import List, Sequence, Mapping, Iterator
@@ -9,19 +9,19 @@ from typing import List, Sequence, Mapping, Iterator
 from .kython.kompress import CPath
 from .common import mcachew, get_files, LazyLogger, make_dict
 
-from mycfg import paths
-import mycfg.repos.rexport.dal as rexport
+from my.config import reddit as config
+import my.config.repos.rexport.dal as rexport
 
 
 def get_sources() -> Sequence[Path]:
     # TODO use zstd?
     # TODO maybe add assert to get_files? (and allow to suppress it)
-    files = get_files(paths.rexport.export_dir, glob='*.json.xz')
+    files = get_files(config.export_dir, glob='*.json.xz')
     res = list(map(CPath, files)); assert len(res) > 0
     return tuple(res)
 
 
-logger = LazyLogger('my.reddit', level='debug')
+logger = LazyLogger(__package__, level='debug')
 
 
 Sid = rexport.Sid

@@ -1,3 +1,7 @@
+'''
+Rescuetime (activity tracking) data
+'''
+
 from pathlib import Path
 from datetime import datetime, timedelta
 from typing import NamedTuple, Dict, List, Set, Optional
@@ -8,18 +12,17 @@ from .error import Res, split_errors
 # TODO get rid of it
 from kython import group_by_cmp # type: ignore
 
-from mycfg import paths
+from my.config import rescuetime as config
 
 
-log = LazyLogger('my.rescuetime', level='info')
+log = LazyLogger(__package__, level='info')
 
 
 def _get_exports() -> List[Path]:
-    from mycfg import paths
-    return get_files(paths.rescuetime.export_path, '*.json')
+    return get_files(config.export_path, '*.json')
 
 
-import mycfg.repos.rescuexport.model as rescuexport
+import my.config.repos.rescuexport.model as rescuexport
 Model = rescuexport.Model
 
 

@@ -11,7 +11,7 @@ from datetime import datetime
 from .common import LazyLogger, get_files, group_by_key, cproperty, make_dict
 from .kython.kompress import open as kopen
 
-from mycfg import rtm as config
+from my.config import rtm as config
 
 
 import icalendar # type: ignore

@@ -2,6 +2,7 @@
 Phone calls and SMS messages
 """
 # TODO extract SMS as well? I barely use them though..
+from . import init
 
 from datetime import datetime
 from pathlib import Path
@@ -12,7 +13,7 @@ from lxml import etree # type: ignore
 
 from .common import get_files
 
-from mycfg import paths
+from my.config import smscalls as config
 
 
 class Call(NamedTuple):
@@ -40,7 +41,7 @@ def _extract_calls(path: Path) -> Iterator[Call]:
 
 
 def calls() -> Iterator[Call]:
-    files = get_files(paths.smscalls.export_path, glob='calls-*.xml')
+    files = get_files(config.export_path, glob='calls-*.xml')
 
     # TODO always replacing with the latter is good, we get better contact names??
     emitted: Set[datetime] = set()

@@ -1,7 +1,11 @@
-import mycfg.repos.stexport.model as stexport
-from mycfg import paths
+'''
+Stackexchange data
+'''
+
+import my.config.repos.stexport.model as stexport
+from my.config import stackexchange as config
 
 
 def get_data():
-    sources = [max(paths.stexport.export_dir.glob('*.json'))]
+    sources = [max(config.export_dir.glob('*.json'))]
     return stexport.Model(sources).site_model('stackoverflow')

@@ -3,7 +3,7 @@ from typing import Optional
 
 from .common import get_files
 
-from mycfg import paths
+from my.config import google as config
 
 from .kython.kompress import kopen
 
@@ -12,7 +12,7 @@ def get_last_takeout(*, path: Optional[str]=None) -> Path:
     Ok, sometimes google splits takeout into two zip archives
     I guess I could detect it (they've got 001/002 etc suffixes), but fornow that works fine..
     """
-    for takeout in reversed(get_files(paths.google.takeout_path, glob='*.zip')):
+    for takeout in reversed(get_files(config.takeout_path, glob='*.zip')):
         if path is None:
             return takeout
         else:

@@ -4,12 +4,7 @@ Module for Twitter (uses official twitter archive export)
 See https://help.twitter.com/en/managing-your-account/how-to-download-your-twitter-archive
 """
 
-# TODO remove these
-"""
-Expects path to be set
-- via ~configure~ (before calling anything else)
-- or in ~mycfg.twitter.export_path~
-"""
+from . import init
 
 
 from datetime import date, datetime
@@ -24,9 +19,10 @@ from .common import PathIsh, get_files, LazyLogger
 from .kython import kompress
 
 
-logger = LazyLogger('my.twitter')
+logger = LazyLogger(__package__)
 
 
+# TODO get rid of this?
 _export_path: Optional[Path] = None
 def configure(*, export_path: Optional[PathIsh]=None) -> None:
     if export_path is not None:
@@ -37,9 +33,9 @@ def configure(*, export_path: Optional[PathIsh]=None) -> None:
 def _get_export() -> Path:
     export_path = _export_path
     if export_path is None:
-        # fallback to mycfg
-        from mycfg import paths
-        export_path = paths.twitter.export_path
+        # fallback
+        from my.config import twitter as config
+        export_path = config.export_path
     return max(get_files(export_path, '*.zip'))
 
 

my/vk.py (4 changes)
@@ -2,7 +2,7 @@ from datetime import datetime
 import json
 from typing import NamedTuple, Iterator, Dict, Union, Sequence, Optional
 
-from mycfg import paths
+from my.config import vk as config
 
 
 class Favorite(NamedTuple):
@@ -53,7 +53,7 @@ def parse_fav(j: Dict) -> Favorite:
 
 
 def _iter_favs() -> Iterator[Res]:
-    jj = json.loads(paths.vk.favs_file.read_text())
+    jj = json.loads(config.favs_file.read_text())
     for j in jj:
         try:
             yield parse_fav(j)

tox.ini (7 changes)
@@ -6,12 +6,13 @@ skip_missing_interpreters = True
 # TODO ugh. unclear how to reuse setup.cfg deps in tox
 [testenv]
 passenv = CI CI_* CIRCLE*
-setenv = MY_CONFIG = mycfg_template
+# deliberately set to nonexistent pathe to check the fallback logic
+setenv = MY_CONFIG = nonexistent
 commands =
     pip install -e .
     # TODO ??
     # python -m pytest {posargs}
     python3 -c 'import my.init; import my.config; print(my.config.__path__)'
    python3 -c 'import my.init; from my.config import stub as config; print(config.key)'
     python3 -c 'import my.init; import my.config; import my.config.repos' # shouldn't fail at least
     # TODO run demo.py? just make sure with_my is a bit cleverer?
     # TODO e.g. under CI, rely on installing
@@ -22,7 +23,7 @@ commands =
 changedir = {toxworkdir}/{envname}/../..
 # TODO not sure?? also reuse installed my. instead on installing in demo.py?
 skip_install = true
-# TODO need to keep full mycfg.py as example? to dummy run CI
+# TODO need to keep full config as example? to dummy run CI
 commands = ./demo.py
 

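Pointing `MY_CONFIG` at a nonexistent path is deliberate: the test run has to survive a missing user config and fall back to the bundled stub. A hypothetical sketch of that kind of env-var fallback, purely illustrative and not the repository's actual `my/init.py` logic:

    import os
    from pathlib import Path
    from typing import List

    def config_search_path(bundled_default: str) -> List[str]:
        # hypothetical helper: honour MY_CONFIG only when it points at a real
        # directory; otherwise fall back to the bundled stub config
        override = os.environ.get('MY_CONFIG')
        if override is not None and Path(override).is_dir():
            return [override, bundled_default]
        return [bundled_default]

    print(config_search_path('mycfg_template'))  # 'mycfg_template' was the old default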