diff --git a/my/core/common.py b/my/core/common.py
index 10599f5..0c7571e 100644
--- a/my/core/common.py
+++ b/my/core/common.py
@@ -130,6 +130,11 @@ def get_files(pp: Paths, glob: str=DEFAULT_GLOB, sort: bool=True) -> Tuple[Path,
     else:
         sources.extend(map(Path, pp))
 
+    def caller() -> str:
+        import traceback
+        # TODO ugh. very flaky... -3 because [, get_files(), ]
+        return traceback.extract_stack()[-3].filename
+
     paths: List[Path] = []
     for src in sources:
         if src.parts[0] == '~':
@@ -141,7 +146,7 @@ def get_files(pp: Paths, glob: str=DEFAULT_GLOB, sort: bool=True) -> Tuple[Path,
         ss = str(src)
         if '*' in ss:
             if glob != DEFAULT_GLOB:
-                warnings.warn(f"Treating {ss} as glob path. Explicit glob={glob} argument is ignored!")
+                warnings.warn(f"{caller()}: treating {ss} as glob path. Explicit glob={glob} argument is ignored!")
             paths.extend(map(Path, do_glob(ss)))
         else:
             if not src.is_file():
@@ -154,8 +159,10 @@ def get_files(pp: Paths, glob: str=DEFAULT_GLOB, sort: bool=True) -> Tuple[Path,
 
     if len(paths) == 0:
         # todo make it conditionally defensive based on some global settings
-        # todo stacktrace?
-        warnings.warn(f'No paths were matched against {paths}. This might result in missing data.')
+        # TODO not sure about using warnings module for this
+        import traceback
+        warnings.warn(f'{caller()}: no paths were matched against {paths}. This might result in missing data.')
+        traceback.print_stack()
 
     return tuple(paths)
@@ -274,13 +281,13 @@
 from typing import Generic, Sized, Callable
 
 
 # X = TypeVar('X')
-def _warn_iterator(it):
+def _warn_iterator(it, f: Any=None):
     emitted = False
     for i in it:
         yield i
         emitted = True
     if not emitted:
-        warnings.warn(f"Function hasn't emitted any data, make sure your config paths are correct")
+        warnings.warn(f"Function {f} didn't emit any data, make sure your config paths are correct")
 
 # TODO ugh, so I want to express something like:
@@ -294,17 +301,17 @@ def _warn_iterator(it):
 from typing import overload
 X = TypeVar('X')
 @overload
-def _warn_iterable(it: List[X] ) -> List[X] : ...
+def _warn_iterable(it: List[X] , f: Any=None) -> List[X] : ...
 @overload
-def _warn_iterable(it: Iterable[X]) -> Iterable[X]: ...
-def _warn_iterable(it):
+def _warn_iterable(it: Iterable[X], f: Any=None) -> Iterable[X]: ...
+def _warn_iterable(it, f=None):
     if isinstance(it, Sized):
         sz = len(it)
         if sz == 0:
-            warnings.warn(f"Function is returning empty container, make sure your config paths are correct")
-            return it # type: ignore[return-value]
+            warnings.warn(f"Function {f} returned empty container, make sure your config paths are correct")
+            return it
         else:
-            return _warn_iterator(it)
+            return _warn_iterator(it, f=f)
 
 
 @overload
@@ -316,5 +323,5 @@ def warn_if_empty(f):
     @wraps(f)
     def wrapped(*args, **kwargs):
         res = f(*args, **kwargs)
-        return _warn_iterable(res)
+        return _warn_iterable(res, f=f)
     return wrapped
diff --git a/my/rss/all.py b/my/rss/all.py
index 90f5efa..61f9fab 100644
--- a/my/rss/all.py
+++ b/my/rss/all.py
@@ -1,11 +1,16 @@
 '''
 Unified RSS data, merged from different services I used historically
 '''
+# NOTE: you can comment out the sources you're not using
+from . import feedbin, feedly
+
 
 from typing import Iterable
 from .common import Subscription, compute_subscriptions
 
 
 def subscriptions() -> Iterable[Subscription]:
-    from . import feedbin, feedly # TODO google reader?
-    yield from compute_subscriptions(feedbin.states(), feedly.states())
+    yield from compute_subscriptions(
+        feedbin.states(),
+        feedly .states(),
+    )
diff --git a/my/rss/common.py b/my/rss/common.py
index 3dc761c..9aa5ed8 100644
--- a/my/rss/common.py
+++ b/my/rss/common.py
@@ -17,6 +17,8 @@ from typing import Iterable, Tuple, Sequence
 SubscriptionState = Tuple[datetime, Sequence[Subscription]]
 
 
+from ..core import warn_if_empty
+@warn_if_empty
 def compute_subscriptions(*sources: Iterable[SubscriptionState]) -> List[Subscription]:
     """
     Keeps track of everything I ever subscribed to.
@@ -34,6 +36,9 @@ def compute_subscriptions(*sources: Iterable[SubscriptionState]) -> List[Subscri
             if feed.url not in by_url:
                 by_url[feed.url] = feed
 
+    if len(states) == 0:
+        return []
+
     _, last_state = max(states, key=lambda x: x[0])
     last_urls = {f.url for f in last_state}
 
diff --git a/my/twitter/all.py b/my/twitter/all.py
index acb59a2..5c8103c 100644
--- a/my/twitter/all.py
+++ b/my/twitter/all.py
@@ -3,11 +3,9 @@
 Unified Twitter data (merged from the archive and periodic updates)
 """
 # NOTE: you can comment out the sources you don't need
-
-
 from . import twint, archive
-from .common import merge_tweets
 
+from .common import merge_tweets
 def tweets():
     yield from merge_tweets(
         twint.tweets(),
@@ -15,6 +13,7 @@ def tweets():
         archive.tweets(),
     )
 
+from .common import merge_tweets
 def likes():
     yield from merge_tweets(
         twint.likes(),
diff --git a/my/twitter/common.py b/my/twitter/common.py
index 1bf36f0..ecfaea3 100644
--- a/my/twitter/common.py
+++ b/my/twitter/common.py
@@ -2,7 +2,9 @@
 from itertools import chain
 
 from more_itertools import unique_everseen
 
+from ..core import warn_if_empty
+@warn_if_empty
 def merge_tweets(*sources):
     yield from unique_everseen(
         chain(*sources),