improvements to @mcachew type checking

This commit is contained in:
Dima Gerasimov 2020-05-03 15:57:11 +01:00
parent 78dbbd3c55
commit 19e90eb647
2 changed files with 29 additions and 14 deletions

View file

@@ -135,7 +135,22 @@ def get_files(pp: Paths, glob: str, sort: bool=True) -> Tuple[Path, ...]:
return tuple(paths)
def mcachew(*args, **kwargs):
# TODO annotate it, perhaps use 'dependent' type (for @doublewrap stuff)
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Callable, TypeVar
from typing_extensions import Protocol
# TODO reuse types from cachew? although not sure if we want hard dependency on it in typecheck time..
# I guess, later just define pass through once this is fixed: https://github.com/python/typing/issues/270
# ok, that's actually a super nice 'pattern'
F = TypeVar('F')
class McachewType(Protocol):
def __call__(self, cache_path: Any=None, *, hashf: Any=None, chunk_by: int=0, logger: Any=None) -> Callable[[F], F]:
...
mcachew: McachewType
def mcachew(*args, **kwargs): # type: ignore[no-redef]
"""
Stands for 'Maybe cachew'.
Defensive wrapper around @cachew to make it an optional dependency.

View file

@@ -21,17 +21,17 @@ def get_sources() -> Sequence[Path]:
return tuple(res)
logger = LazyLogger(__package__, level='debug')
logger = LazyLogger(__name__, level='debug')
Sid = rexport.Sid
Save = rexport.Save
Comment = rexport.Comment
Sid = rexport.Sid
Save = rexport.Save
Comment = rexport.Comment
Submission = rexport.Submission
Upvote = rexport.Upvote
Upvote = rexport.Upvote
def dal():
def dal() -> rexport.DAL:
# TODO lru cache? but be careful when it runs continuously
return rexport.DAL(get_sources())
@@ -173,12 +173,12 @@ def get_events(*args, **kwargs) -> List[Event]:
return list(sorted(evit, key=lambda e: e.cmp_key))
def test():
def test() -> None:
get_events(backups=get_sources()[-1:])
list(saved())
def test_unfav():
def test_unfav() -> None:
events = get_events()
url = 'https://reddit.com/r/QuantifiedSelf/comments/acxy1v/personal_dashboard/'
uevents = [e for e in events if e.url == url]
@@ -188,15 +188,15 @@ def test_unfav():
uf = uevents[1]
assert uf.text == 'unfavorited'
def test_get_all_saves():
# TODO move out..
def test_get_all_saves() -> None:
# TODO not sure if this is necessary anymore?
saves = list(saved())
# just check that they are unique..
make_dict(saves, key=lambda s: s.sid)
def test_disappearing():
def test_disappearing() -> None:
# eh. so for instance, 'metro line colors' is missing from reddit-20190402005024.json for no reason
# but I guess it was just a short glitch... so whatever
saves = get_events()
@@ -205,14 +205,14 @@ def test_disappearing():
assert deal_with_it.backup_dt == datetime(2019, 4, 1, 23, 10, 25, tzinfo=pytz.utc)
def test_unfavorite():
def test_unfavorite() -> None:
events = get_events()
unfavs = [s for s in events if s.text == 'unfavorited']
[xxx] = [u for u in unfavs if u.eid == 'unf-19ifop']
assert xxx.dt == datetime(2019, 1, 28, 8, 10, 20, tzinfo=pytz.utc)
def main():
def main() -> None:
# TODO eh. not sure why but parallel on seems to mess glumov up and cause OOM...
events = get_events(parallel=False)
print(len(events))