From 19e90eb64728e8805be260353013f1aaedf4fcf3 Mon Sep 17 00:00:00 2001
From: Dima Gerasimov
Date: Sun, 3 May 2020 15:57:11 +0100
Subject: [PATCH] improvements to @mcachew type checking

---
 my/common.py | 17 ++++++++++++++++-
 my/reddit.py | 26 +++++++++++++-------------
 2 files changed, 29 insertions(+), 14 deletions(-)

diff --git a/my/common.py b/my/common.py
index 89ee916..172af06 100644
--- a/my/common.py
+++ b/my/common.py
@@ -135,7 +135,22 @@ def get_files(pp: Paths, glob: str, sort: bool=True) -> Tuple[Path, ...]:
     return tuple(paths)
 
 
-def mcachew(*args, **kwargs):
+# TODO annotate it, perhaps use 'dependent' type (for @doublewrap stuff)
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    from typing import Callable, TypeVar
+    from typing_extensions import Protocol
+    # TODO reuse types from cachew? although not sure if we want hard dependency on it in typecheck time..
+    # I guess, later just define pass through once this is fixed: https://github.com/python/typing/issues/270
+    # ok, that's actually a super nice 'pattern'
+    F = TypeVar('F')
+    class McachewType(Protocol):
+        def __call__(self, cache_path: Any=None, *, hashf: Any=None, chunk_by: int=0, logger: Any=None) -> Callable[[F], F]:
+            ...
+
+    mcachew: McachewType
+
+def mcachew(*args, **kwargs): # type: ignore[no-redef]
     """
     Stands for 'Maybe cachew'.
     Defensive wrapper around @cachew to make it an optional dependency.
diff --git a/my/reddit.py b/my/reddit.py
index 36921e9..e4e5590 100755
--- a/my/reddit.py
+++ b/my/reddit.py
@@ -21,17 +21,17 @@ def get_sources() -> Sequence[Path]:
     return tuple(res)
 
 
-logger = LazyLogger(__package__, level='debug')
+logger = LazyLogger(__name__, level='debug')
 
 
-Sid = rexport.Sid
-Save = rexport.Save
-Comment = rexport.Comment
+Sid        = rexport.Sid
+Save       = rexport.Save
+Comment    = rexport.Comment
 Submission = rexport.Submission
-Upvote = rexport.Upvote
+Upvote     = rexport.Upvote
 
 
-def dal():
+def dal() -> rexport.DAL:
     # TODO lru cache? but be careful when it runs continuously
     return rexport.DAL(get_sources())
 
@@ -173,12 +173,12 @@ def get_events(*args, **kwargs) -> List[Event]:
     return list(sorted(evit, key=lambda e: e.cmp_key))
 
 
-def test():
+def test() -> None:
    get_events(backups=get_sources()[-1:])
    list(saved())
 
 
-def test_unfav():
+def test_unfav() -> None:
     events = get_events()
     url = 'https://reddit.com/r/QuantifiedSelf/comments/acxy1v/personal_dashboard/'
     uevents = [e for e in events if e.url == url]
@@ -188,15 +188,15 @@ def test_unfav():
     uf = uevents[1]
     assert uf.text == 'unfavorited'
 
-
-def test_get_all_saves():
+# TODO move out..
+def test_get_all_saves() -> None:
     # TODO not sure if this is necesasry anymore?
     saves = list(saved())
     # just check that they are unique..
     make_dict(saves, key=lambda s: s.sid)
 
 
-def test_disappearing():
+def test_disappearing() -> None:
     # eh. so for instance, 'metro line colors' is missing from reddit-20190402005024.json for no reason
     # but I guess it was just a short glitch... so whatever
     saves = get_events()
@@ -205,14 +205,14 @@ def test_disappearing():
     assert deal_with_it.backup_dt == datetime(2019, 4, 1, 23, 10, 25, tzinfo=pytz.utc)
 
 
-def test_unfavorite():
+def test_unfavorite() -> None:
     events = get_events()
     unfavs = [s for s in events if s.text == 'unfavorited']
     [xxx] = [u for u in unfavs if u.eid == 'unf-19ifop']
     assert xxx.dt == datetime(2019, 1, 28, 8, 10, 20, tzinfo=pytz.utc)
 
 
-def main():
+def main() -> None:
     # TODO eh. not sure why but parallel on seems to mess glumov up and cause OOM...
     events = get_events(parallel=False)
     print(len(events))
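
Note on the pattern in the my/common.py hunk: at runtime @mcachew stays a
permissive *args/**kwargs wrapper (so cachew remains an optional dependency),
while under TYPE_CHECKING the same name is redeclared via a Protocol, so mypy
checks call sites against a precise signature; the runtime def then carries
'# type: ignore[no-redef]' to silence the redefinition. Below is a minimal
standalone sketch of the same idea; the names maybe_cached and CacheDecorator
and the no-op fallback body are illustrative, not part of this patch:

    from typing import TYPE_CHECKING, Any, Callable, TypeVar

    if TYPE_CHECKING:
        from typing_extensions import Protocol  # typing.Protocol on 3.8+

        F = TypeVar('F')

        class CacheDecorator(Protocol):
            # the exact signature mypy will enforce at call sites
            def __call__(self, cache_path: Any = None, *, logger: Any = None) -> Callable[[F], F]:
                ...

        # type-checking-only declaration; invisible at runtime
        maybe_cached: CacheDecorator

    def maybe_cached(*args, **kwargs):  # type: ignore[no-redef]
        # permissive runtime version: here it just degrades to a no-op
        # decorator, standing in for 'use the caching library if installed'
        def decorator(func):
            return func
        return decorator

    @maybe_cached(cache_path='/tmp/cache')
    def expensive() -> int:
        return 42

Since the Protocol's __call__ returns Callable[[F], F], the decorated function
keeps its original signature as far as the type checker is concerned, which is
the main payoff over annotating the decorator itself as Any.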