ruff: enable B warnings (mainly suppressed exceptions and unused variables)
This commit is contained in:
parent
d0df8e8f2d
commit
72cc8ff3ac
30 changed files with 83 additions and 67 deletions
|
@ -153,7 +153,7 @@ def measurements() -> Iterable[Res[Measurement]]:
|
||||||
oldfmt = False
|
oldfmt = False
|
||||||
db_dt = None
|
db_dt = None
|
||||||
|
|
||||||
for i, (name, tsc, temp, hum, pres, dewp) in enumerate(datas):
|
for (name, tsc, temp, hum, pres, dewp) in datas:
|
||||||
if is_bad_table(name):
|
if is_bad_table(name):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
|
@ -105,7 +105,7 @@ def dataframe() -> DataFrameT:
|
||||||
rows = []
|
rows = []
|
||||||
idxs = [] # type: ignore[var-annotated]
|
idxs = [] # type: ignore[var-annotated]
|
||||||
NO_ENDOMONDO = 'no endomondo matches'
|
NO_ENDOMONDO = 'no endomondo matches'
|
||||||
for i, row in mdf.iterrows():
|
for _i, row in mdf.iterrows():
|
||||||
rd = row.to_dict()
|
rd = row.to_dict()
|
||||||
mdate = row['date']
|
mdate = row['date']
|
||||||
if pd.isna(mdate):
|
if pd.isna(mdate):
|
||||||
|
|
|
@ -1,8 +1,11 @@
|
||||||
import warnings
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
warnings.warn('my.coding.github is deprecated! Please use my.github.all instead!')
|
from my.core import warnings
|
||||||
|
|
||||||
|
warnings.high('my.coding.github is deprecated! Please use my.github.all instead!')
|
||||||
# todo why aren't DeprecationWarning shown by default??
|
# todo why aren't DeprecationWarning shown by default??
|
||||||
|
|
||||||
|
if not TYPE_CHECKING:
|
||||||
from ..github.all import events, get_events
|
from ..github.all import events, get_events
|
||||||
|
|
||||||
# todo deprecate properly
|
# todo deprecate properly
|
||||||
|
|
|
@ -456,9 +456,9 @@ def _locate_functions_or_prompt(qualified_names: List[str], *, prompt: bool = Tr
|
||||||
# user to select a 'data provider' like function
|
# user to select a 'data provider' like function
|
||||||
try:
|
try:
|
||||||
mod = importlib.import_module(qualname)
|
mod = importlib.import_module(qualname)
|
||||||
except Exception:
|
except Exception as ie:
|
||||||
eprint(f"During fallback, importing '{qualname}' as module failed")
|
eprint(f"During fallback, importing '{qualname}' as module failed")
|
||||||
raise qr_err
|
raise qr_err from ie
|
||||||
|
|
||||||
# find data providers in this module
|
# find data providers in this module
|
||||||
data_providers = [f for _, f in inspect.getmembers(mod, inspect.isfunction) if is_data_provider(f)]
|
data_providers = [f for _, f in inspect.getmembers(mod, inspect.isfunction) if is_data_provider(f)]
|
||||||
|
|
|
@ -2,7 +2,6 @@ from .internal import assert_subpackage; assert_subpackage(__name__)
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
import warnings
|
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import (
|
from typing import (
|
||||||
|
@ -20,6 +19,9 @@ from typing import (
|
||||||
|
|
||||||
import appdirs # type: ignore[import-untyped]
|
import appdirs # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from . import warnings
|
||||||
|
|
||||||
|
|
||||||
PathIsh = Union[str, Path] # avoid circular import from .common
|
PathIsh = Union[str, Path] # avoid circular import from .common
|
||||||
|
|
||||||
|
|
||||||
|
@ -116,7 +118,7 @@ def _mcachew_impl(cache_path=_cache_path_dflt, **kwargs):
|
||||||
try:
|
try:
|
||||||
import cachew
|
import cachew
|
||||||
except ModuleNotFoundError:
|
except ModuleNotFoundError:
|
||||||
warnings.warn('cachew library not found. You might want to install it to speed things up. See https://github.com/karlicoss/cachew')
|
warnings.high('cachew library not found. You might want to install it to speed things up. See https://github.com/karlicoss/cachew')
|
||||||
return lambda orig_func: orig_func
|
return lambda orig_func: orig_func
|
||||||
else:
|
else:
|
||||||
kwargs['cache_path'] = cache_path
|
kwargs['cache_path'] = cache_path
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
import os
|
import os
|
||||||
import warnings
|
|
||||||
from glob import glob as do_glob
|
from glob import glob as do_glob
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import (
|
from typing import (
|
||||||
|
@ -15,7 +14,7 @@ from typing import (
|
||||||
)
|
)
|
||||||
|
|
||||||
from . import compat
|
from . import compat
|
||||||
from . import warnings as core_warnings
|
from . import warnings as warnings
|
||||||
|
|
||||||
# some helper functions
|
# some helper functions
|
||||||
# TODO start deprecating this? soon we'd be able to use Path | str syntax which is shorter and more explicit
|
# TODO start deprecating this? soon we'd be able to use Path | str syntax which is shorter and more explicit
|
||||||
|
@ -63,7 +62,7 @@ def get_files(
|
||||||
gs = str(src)
|
gs = str(src)
|
||||||
if '*' in gs:
|
if '*' in gs:
|
||||||
if glob != DEFAULT_GLOB:
|
if glob != DEFAULT_GLOB:
|
||||||
warnings.warn(f"{caller()}: treating {gs} as glob path. Explicit glob={glob} argument is ignored!")
|
warnings.medium(f"{caller()}: treating {gs} as glob path. Explicit glob={glob} argument is ignored!")
|
||||||
paths.extend(map(Path, do_glob(gs)))
|
paths.extend(map(Path, do_glob(gs)))
|
||||||
elif os.path.isdir(str(src)):
|
elif os.path.isdir(str(src)):
|
||||||
# NOTE: we're using os.path here on purpose instead of src.is_dir
|
# NOTE: we're using os.path here on purpose instead of src.is_dir
|
||||||
|
@ -85,7 +84,7 @@ def get_files(
|
||||||
|
|
||||||
if len(paths) == 0:
|
if len(paths) == 0:
|
||||||
# todo make it conditionally defensive based on some global settings
|
# todo make it conditionally defensive based on some global settings
|
||||||
core_warnings.high(f'''
|
warnings.high(f'''
|
||||||
{caller()}: no paths were matched against {pp}. This might result in missing data. Likely, the directory you passed is empty.
|
{caller()}: no paths were matched against {pp}. This might result in missing data. Likely, the directory you passed is empty.
|
||||||
'''.strip())
|
'''.strip())
|
||||||
# traceback is useful to figure out what config caused it?
|
# traceback is useful to figure out what config caused it?
|
||||||
|
|
|
@ -119,7 +119,7 @@ def sort_res_by(items: Iterable[Res[T]], key: Callable[[Any], K]) -> List[Res[T]
|
||||||
group = []
|
group = []
|
||||||
|
|
||||||
results: List[Res[T]] = []
|
results: List[Res[T]] = []
|
||||||
for v, grp in sorted(groups, key=lambda p: p[0]): # type: ignore[return-value, arg-type] # TODO SupportsLessThan??
|
for _v, grp in sorted(groups, key=lambda p: p[0]): # type: ignore[return-value, arg-type] # TODO SupportsLessThan??
|
||||||
results.extend(grp)
|
results.extend(grp)
|
||||||
results.extend(group) # handle last group (it will always be errors only)
|
results.extend(group) # handle last group (it will always be errors only)
|
||||||
|
|
||||||
|
|
|
@ -6,7 +6,7 @@ import inspect
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from types import ModuleType
|
from types import ModuleType
|
||||||
from typing import Iterator, List, Optional, TypeVar
|
from typing import Iterator, List, Optional, Sequence, TypeVar
|
||||||
|
|
||||||
from . import warnings
|
from . import warnings
|
||||||
|
|
||||||
|
@ -71,7 +71,7 @@ def pre_pip_dal_handler(
|
||||||
name: str,
|
name: str,
|
||||||
e: ModuleNotFoundError,
|
e: ModuleNotFoundError,
|
||||||
cfg,
|
cfg,
|
||||||
requires=[],
|
requires: Sequence[str] = (),
|
||||||
) -> ModuleType:
|
) -> ModuleType:
|
||||||
'''
|
'''
|
||||||
https://github.com/karlicoss/HPI/issues/79
|
https://github.com/karlicoss/HPI/issues/79
|
||||||
|
|
|
@ -25,7 +25,7 @@ def setup_config() -> None:
|
||||||
warnings.warn(f"""
|
warnings.warn(f"""
|
||||||
'my.config' package isn't found! (expected at '{mycfg_dir}'). This is likely to result in issues.
|
'my.config' package isn't found! (expected at '{mycfg_dir}'). This is likely to result in issues.
|
||||||
See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
|
See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
|
||||||
""".strip())
|
""".strip(), stacklevel=1)
|
||||||
return
|
return
|
||||||
|
|
||||||
mpath = str(mycfg_dir)
|
mpath = str(mycfg_dir)
|
||||||
|
@ -47,7 +47,7 @@ See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-mo
|
||||||
warnings.warn(f"""
|
warnings.warn(f"""
|
||||||
Importing 'my.config' failed! (error: {ex}). This is likely to result in issues.
|
Importing 'my.config' failed! (error: {ex}). This is likely to result in issues.
|
||||||
See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
|
See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
|
||||||
""")
|
""", stacklevel=1)
|
||||||
else:
|
else:
|
||||||
# defensive just in case -- __file__ may not be present if there is some dynamic magic involved
|
# defensive just in case -- __file__ may not be present if there is some dynamic magic involved
|
||||||
used_config_file = getattr(my.config, '__file__', None)
|
used_config_file = getattr(my.config, '__file__', None)
|
||||||
|
@ -63,7 +63,7 @@ See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-mo
|
||||||
Expected my.config to be located at {mycfg_dir}, but instead its path is {used_config_path}.
|
Expected my.config to be located at {mycfg_dir}, but instead its path is {used_config_path}.
|
||||||
This will likely cause issues down the line -- double check {mycfg_dir} structure.
|
This will likely cause issues down the line -- double check {mycfg_dir} structure.
|
||||||
See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
|
See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
|
||||||
""",
|
""", stacklevel=1
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -15,7 +15,7 @@ def test() -> None:
|
||||||
|
|
||||||
## prepare exception for later
|
## prepare exception for later
|
||||||
try:
|
try:
|
||||||
None.whatever # type: ignore[attr-defined]
|
None.whatever # type: ignore[attr-defined] # noqa: B018
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
ex = e
|
ex = e
|
||||||
##
|
##
|
||||||
|
@ -146,7 +146,7 @@ def _setup_handlers_and_formatters(name: str) -> None:
|
||||||
# try colorlog first, so user gets nice colored logs
|
# try colorlog first, so user gets nice colored logs
|
||||||
import colorlog
|
import colorlog
|
||||||
except ModuleNotFoundError:
|
except ModuleNotFoundError:
|
||||||
warnings.warn("You might want to 'pip install colorlog' for nice colored logs")
|
warnings.warn("You might want to 'pip install colorlog' for nice colored logs", stacklevel=1)
|
||||||
formatter = logging.Formatter(FORMAT_NOCOLOR)
|
formatter = logging.Formatter(FORMAT_NOCOLOR)
|
||||||
else:
|
else:
|
||||||
# log_color/reset are specific to colorlog
|
# log_color/reset are specific to colorlog
|
||||||
|
@ -233,7 +233,7 @@ def get_enlighten():
|
||||||
try:
|
try:
|
||||||
import enlighten # type: ignore[import-untyped]
|
import enlighten # type: ignore[import-untyped]
|
||||||
except ModuleNotFoundError:
|
except ModuleNotFoundError:
|
||||||
warnings.warn("You might want to 'pip install enlighten' for a nice progress bar")
|
warnings.warn("You might want to 'pip install enlighten' for a nice progress bar", stacklevel=1)
|
||||||
|
|
||||||
return Mock()
|
return Mock()
|
||||||
|
|
||||||
|
|
|
@ -6,7 +6,7 @@ from datetime import datetime
|
||||||
|
|
||||||
def parse_org_datetime(s: str) -> datetime:
|
def parse_org_datetime(s: str) -> datetime:
|
||||||
s = s.strip('[]')
|
s = s.strip('[]')
|
||||||
for fmt, cl in [
|
for fmt, _cls in [
|
||||||
("%Y-%m-%d %a %H:%M", datetime),
|
("%Y-%m-%d %a %H:%M", datetime),
|
||||||
("%Y-%m-%d %H:%M" , datetime),
|
("%Y-%m-%d %H:%M" , datetime),
|
||||||
# todo not sure about these... fallback on 00:00?
|
# todo not sure about these... fallback on 00:00?
|
||||||
|
|
|
@ -72,7 +72,7 @@ def locate_function(module_name: str, function_name: str) -> Callable[[], Iterab
|
||||||
if func is not None and callable(func):
|
if func is not None and callable(func):
|
||||||
return func
|
return func
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise QueryException(str(e))
|
raise QueryException(str(e)) # noqa: B904
|
||||||
raise QueryException(f"Could not find function '{function_name}' in '{module_name}'")
|
raise QueryException(f"Could not find function '{function_name}' in '{module_name}'")
|
||||||
|
|
||||||
|
|
||||||
|
@ -468,7 +468,7 @@ Will attempt to call iter() on the value""")
|
||||||
try:
|
try:
|
||||||
itr: Iterator[ET] = iter(it)
|
itr: Iterator[ET] = iter(it)
|
||||||
except TypeError as t:
|
except TypeError as t:
|
||||||
raise QueryException("Could not convert input src to an Iterator: " + str(t))
|
raise QueryException("Could not convert input src to an Iterator: " + str(t)) # noqa: B904
|
||||||
|
|
||||||
# if both drop_exceptions and drop_exceptions are provided for some reason,
|
# if both drop_exceptions and drop_exceptions are provided for some reason,
|
||||||
# should raise exceptions before dropping them
|
# should raise exceptions before dropping them
|
||||||
|
|
|
@ -109,7 +109,7 @@ def _datelike_to_float(dl: Any) -> float:
|
||||||
try:
|
try:
|
||||||
return parse_datetime_float(dl)
|
return parse_datetime_float(dl)
|
||||||
except QueryException as q:
|
except QueryException as q:
|
||||||
raise QueryException(f"While attempting to extract datetime from {dl}, to order by datetime:\n\n" + str(q))
|
raise QueryException(f"While attempting to extract datetime from {dl}, to order by datetime:\n\n" + str(q)) # noqa: B904
|
||||||
|
|
||||||
|
|
||||||
class RangeTuple(NamedTuple):
|
class RangeTuple(NamedTuple):
|
||||||
|
|
|
@ -62,7 +62,7 @@ def import_source(
|
||||||
|
|
||||||
class core:
|
class core:
|
||||||
disabled_modules = [{module_name!r}]
|
disabled_modules = [{module_name!r}]
|
||||||
""")
|
""", stacklevel=1)
|
||||||
# try to check if this is a config error or based on dependencies not being installed
|
# try to check if this is a config error or based on dependencies not being installed
|
||||||
if isinstance(err, (ImportError, AttributeError)):
|
if isinstance(err, (ImportError, AttributeError)):
|
||||||
matched_config_err = warn_my_config_import_error(err, module_name=module_name, help_url=help_url)
|
matched_config_err = warn_my_config_import_error(err, module_name=module_name, help_url=help_url)
|
||||||
|
|
|
@ -440,7 +440,7 @@ def _guess_datetime(x: Any) -> Optional[datetime]:
|
||||||
d = asdict(x)
|
d = asdict(x)
|
||||||
except: # noqa: E722 bare except
|
except: # noqa: E722 bare except
|
||||||
return None
|
return None
|
||||||
for k, v in d.items():
|
for _k, v in d.items():
|
||||||
if isinstance(v, datetime):
|
if isinstance(v, datetime):
|
||||||
return v
|
return v
|
||||||
return None
|
return None
|
||||||
|
|
|
@ -93,11 +93,11 @@ def _discover_path_importables(pkg_pth: Path, pkg_name: str) -> Iterable[HPIModu
|
||||||
def _walk_packages(path: Iterable[str], prefix: str='', onerror=None) -> Iterable[HPIModule]:
|
def _walk_packages(path: Iterable[str], prefix: str='', onerror=None) -> Iterable[HPIModule]:
|
||||||
"""
|
"""
|
||||||
Modified version of https://github.com/python/cpython/blob/d50a0700265536a20bcce3fb108c954746d97625/Lib/pkgutil.py#L53,
|
Modified version of https://github.com/python/cpython/blob/d50a0700265536a20bcce3fb108c954746d97625/Lib/pkgutil.py#L53,
|
||||||
to alvoid importing modules that are skipped
|
to avoid importing modules that are skipped
|
||||||
"""
|
"""
|
||||||
from .core_config import config
|
from .core_config import config
|
||||||
|
|
||||||
def seen(p, m={}):
|
def seen(p, m={}): # noqa: B006
|
||||||
if p in m:
|
if p in m:
|
||||||
return True
|
return True
|
||||||
m[p] = True
|
m[p] = True
|
||||||
|
|
|
@ -24,6 +24,8 @@ import more_itertools
|
||||||
from decorator import decorator
|
from decorator import decorator
|
||||||
|
|
||||||
from ..compat import ParamSpec
|
from ..compat import ParamSpec
|
||||||
|
from .. import warnings as core_warnings
|
||||||
|
|
||||||
|
|
||||||
T = TypeVar('T')
|
T = TypeVar('T')
|
||||||
K = TypeVar('K')
|
K = TypeVar('K')
|
||||||
|
@ -142,8 +144,7 @@ def _warn_if_empty(func, *args, **kwargs):
|
||||||
if isinstance(iterable, Sized):
|
if isinstance(iterable, Sized):
|
||||||
sz = len(iterable)
|
sz = len(iterable)
|
||||||
if sz == 0:
|
if sz == 0:
|
||||||
# todo use hpi warnings here?
|
core_warnings.medium(f"Function {func} returned empty container, make sure your config paths are correct")
|
||||||
warnings.warn(f"Function {func} returned empty container, make sure your config paths are correct")
|
|
||||||
return iterable
|
return iterable
|
||||||
else: # must be an iterator
|
else: # must be an iterator
|
||||||
|
|
||||||
|
@ -153,7 +154,7 @@ def _warn_if_empty(func, *args, **kwargs):
|
||||||
yield i
|
yield i
|
||||||
empty = False
|
empty = False
|
||||||
if empty:
|
if empty:
|
||||||
warnings.warn(f"Function {func} didn't emit any data, make sure your config paths are correct")
|
core_warnings.medium(f"Function {func} didn't emit any data, make sure your config paths are correct")
|
||||||
|
|
||||||
return wit()
|
return wit()
|
||||||
|
|
||||||
|
|
|
@ -27,7 +27,7 @@ def _colorize(x: str, color: Optional[str]=None) -> str:
|
||||||
def _warn(message: str, *args, color: Optional[str] = None, **kwargs) -> None:
|
def _warn(message: str, *args, color: Optional[str] = None, **kwargs) -> None:
|
||||||
stacklevel = kwargs.get('stacklevel', 1)
|
stacklevel = kwargs.get('stacklevel', 1)
|
||||||
kwargs['stacklevel'] = stacklevel + 2 # +1 for this function, +1 for medium/high wrapper
|
kwargs['stacklevel'] = stacklevel + 2 # +1 for this function, +1 for medium/high wrapper
|
||||||
warnings.warn(_colorize(message, color=color), *args, **kwargs)
|
warnings.warn(_colorize(message, color=color), *args, **kwargs) # noqa: B028
|
||||||
|
|
||||||
|
|
||||||
def low(message: str, *args, **kwargs) -> None:
|
def low(message: str, *args, **kwargs) -> None:
|
||||||
|
@ -55,4 +55,4 @@ if not TYPE_CHECKING:
|
||||||
def warn(*args, **kwargs):
|
def warn(*args, **kwargs):
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
return warnings.warn(*args, **kwargs)
|
return warnings.warn(*args, **kwargs) # noqa: B028
|
||||||
|
|
|
@ -274,7 +274,7 @@ def plot() -> None:
|
||||||
fig: Figure = plt.figure(figsize=(15, sleeps_count * 1))
|
fig: Figure = plt.figure(figsize=(15, sleeps_count * 1))
|
||||||
|
|
||||||
axarr = fig.subplots(nrows=len(sleeps))
|
axarr = fig.subplots(nrows=len(sleeps))
|
||||||
for i, (sleep, axes) in enumerate(zip(sleeps, axarr)):
|
for (sleep, axes) in zip(sleeps, axarr):
|
||||||
plot_one(sleep, fig, axes, showtext=True)
|
plot_one(sleep, fig, axes, showtext=True)
|
||||||
used = melatonin_data.get(sleep.date_, None)
|
used = melatonin_data.get(sleep.date_, None)
|
||||||
sused: str
|
sused: str
|
||||||
|
|
|
@ -22,9 +22,10 @@ import geopy # type: ignore
|
||||||
from my.core import stat, Stats, make_logger
|
from my.core import stat, Stats, make_logger
|
||||||
from my.core.cachew import cache_dir, mcachew
|
from my.core.cachew import cache_dir, mcachew
|
||||||
|
|
||||||
from my.core.warnings import high
|
from my.core import warnings
|
||||||
|
|
||||||
high("Please set up my.google.takeout.parser module for better takeout support")
|
|
||||||
|
warnings.high("Please set up my.google.takeout.parser module for better takeout support")
|
||||||
|
|
||||||
|
|
||||||
# otherwise uses ijson
|
# otherwise uses ijson
|
||||||
|
@ -52,8 +53,7 @@ def _iter_via_ijson(fo) -> Iterable[TsLatLon]:
|
||||||
# pip3 install ijson cffi
|
# pip3 install ijson cffi
|
||||||
import ijson.backends.yajl2_cffi as ijson # type: ignore
|
import ijson.backends.yajl2_cffi as ijson # type: ignore
|
||||||
except:
|
except:
|
||||||
import warnings
|
warnings.medium("Falling back to default ijson because 'cffi' backend isn't found. It's up to 2x faster, you might want to check it out")
|
||||||
warnings.warn("Falling back to default ijson because 'cffi' backend isn't found. It's up to 2x faster, you might want to check it out")
|
|
||||||
import ijson # type: ignore
|
import ijson # type: ignore
|
||||||
|
|
||||||
for d in ijson.items(fo, 'locations.item'):
|
for d in ijson.items(fo, 'locations.item'):
|
||||||
|
@ -105,7 +105,8 @@ def _iter_locations_fo(fit) -> Iterable[Location]:
|
||||||
errors += 1
|
errors += 1
|
||||||
if float(errors) / total > 0.01:
|
if float(errors) / total > 0.01:
|
||||||
# todo make defensive?
|
# todo make defensive?
|
||||||
raise RuntimeError('too many errors! aborting')
|
# todo exceptiongroup?
|
||||||
|
raise RuntimeError('too many errors! aborting') # noqa: B904
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ def iter_movies() -> Iterator[Movie]:
|
||||||
|
|
||||||
with last.open() as fo:
|
with last.open() as fo:
|
||||||
reader = csv.DictReader(fo)
|
reader = csv.DictReader(fo)
|
||||||
for i, line in enumerate(reader):
|
for line in reader:
|
||||||
# TODO extract directors??
|
# TODO extract directors??
|
||||||
title = line['Title']
|
title = line['Title']
|
||||||
rating = int(line['You rated'])
|
rating = int(line['You rated'])
|
||||||
|
|
|
@ -166,7 +166,7 @@ class Loader:
|
||||||
htags: List[str] = []
|
htags: List[str] = []
|
||||||
if 'tags' in h:
|
if 'tags' in h:
|
||||||
ht = h['tags'].zoom()
|
ht = h['tags'].zoom()
|
||||||
for k, v in list(ht.items()):
|
for _k, v in list(ht.items()):
|
||||||
ctag = v.zoom()
|
ctag = v.zoom()
|
||||||
ctag['id'].consume()
|
ctag['id'].consume()
|
||||||
ct = ctag['label'].zoom()
|
ct = ctag['label'].zoom()
|
||||||
|
@ -199,7 +199,7 @@ class Loader:
|
||||||
|
|
||||||
|
|
||||||
def load_items(self, metas: Json) -> Iterable[Highlight]:
|
def load_items(self, metas: Json) -> Iterable[Highlight]:
|
||||||
for p, meta in metas.items():
|
for _p, meta in metas.items():
|
||||||
with wrap(meta, throw=not config.defensive) as meta:
|
with wrap(meta, throw=not config.defensive) as meta:
|
||||||
yield from self.load_item(meta)
|
yield from self.load_item(meta)
|
||||||
|
|
||||||
|
|
|
@ -144,9 +144,9 @@ if not TYPE_CHECKING:
|
||||||
try:
|
try:
|
||||||
# here we just check that types are available, we don't actually want to import them
|
# here we just check that types are available, we don't actually want to import them
|
||||||
# fmt: off
|
# fmt: off
|
||||||
dal.Subreddit
|
dal.Subreddit # noqa: B018
|
||||||
dal.Profile
|
dal.Profile # noqa: B018
|
||||||
dal.Multireddit
|
dal.Multireddit # noqa: B018
|
||||||
# fmt: on
|
# fmt: on
|
||||||
except AttributeError as ae:
|
except AttributeError as ae:
|
||||||
warnings.high(f'{ae} : please update "rexport" installation')
|
warnings.high(f'{ae} : please update "rexport" installation')
|
||||||
|
|
|
@ -32,7 +32,7 @@ def compute_subscriptions(*sources: Iterable[SubscriptionState]) -> List[Subscri
|
||||||
|
|
||||||
by_url: Dict[str, Subscription] = {}
|
by_url: Dict[str, Subscription] = {}
|
||||||
# ah. dates are used for sorting
|
# ah. dates are used for sorting
|
||||||
for when, state in sorted(states):
|
for _when, state in sorted(states):
|
||||||
# TODO use 'when'?
|
# TODO use 'when'?
|
||||||
for feed in state:
|
for feed in state:
|
||||||
if feed.url not in by_url:
|
if feed.url not in by_url:
|
||||||
|
|
|
@ -44,8 +44,8 @@ def _prepare_takeouts_dir(tmp_path: Path) -> Path:
|
||||||
|
|
||||||
try:
|
try:
|
||||||
track = one(testdata().rglob('italy-slovenia-2017-07-29.json'))
|
track = one(testdata().rglob('italy-slovenia-2017-07-29.json'))
|
||||||
except ValueError:
|
except ValueError as e:
|
||||||
raise RuntimeError('testdata not found, setup git submodules?')
|
raise RuntimeError('testdata not found, setup git submodules?') from e
|
||||||
|
|
||||||
# todo ugh. unnecessary zipping, but at the moment takeout provider doesn't support plain dirs
|
# todo ugh. unnecessary zipping, but at the moment takeout provider doesn't support plain dirs
|
||||||
import zipfile
|
import zipfile
|
||||||
|
|
|
@ -49,8 +49,8 @@ def _prepare_takeouts_dir(tmp_path: Path) -> Path:
|
||||||
|
|
||||||
try:
|
try:
|
||||||
track = one(testdata().rglob('italy-slovenia-2017-07-29.json'))
|
track = one(testdata().rglob('italy-slovenia-2017-07-29.json'))
|
||||||
except ValueError:
|
except ValueError as e:
|
||||||
raise RuntimeError('testdata not found, setup git submodules?')
|
raise RuntimeError('testdata not found, setup git submodules?') from e
|
||||||
|
|
||||||
# todo ugh. unnecessary zipping, but at the moment takeout provider doesn't support plain dirs
|
# todo ugh. unnecessary zipping, but at the moment takeout provider doesn't support plain dirs
|
||||||
import zipfile
|
import zipfile
|
||||||
|
|
|
@ -33,7 +33,7 @@ def default_policy() -> TzPolicy:
|
||||||
def localize_with_policy(
|
def localize_with_policy(
|
||||||
lfun: Callable[[datetime], datetime_aware],
|
lfun: Callable[[datetime], datetime_aware],
|
||||||
dt: datetime,
|
dt: datetime,
|
||||||
policy: TzPolicy=default_policy()
|
policy: TzPolicy=default_policy() # noqa: B008
|
||||||
) -> datetime_aware:
|
) -> datetime_aware:
|
||||||
tz = dt.tzinfo
|
tz = dt.tzinfo
|
||||||
if tz is None:
|
if tz is None:
|
||||||
|
|
|
@ -14,9 +14,9 @@ except ImportError as ie:
|
||||||
try:
|
try:
|
||||||
from my.config import twitter as user_config # type: ignore[assignment]
|
from my.config import twitter as user_config # type: ignore[assignment]
|
||||||
except ImportError:
|
except ImportError:
|
||||||
raise ie # raise the original exception.. must be something else
|
raise ie # raise the original exception.. must be something else # noqa: B904
|
||||||
else:
|
else:
|
||||||
from ..core import warnings
|
from my.core import warnings
|
||||||
warnings.high('my.config.twitter is deprecated! Please rename it to my.config.twitter_archive in your config')
|
warnings.high('my.config.twitter is deprecated! Please rename it to my.config.twitter_archive in your config')
|
||||||
##
|
##
|
||||||
|
|
||||||
|
|
10
ruff.toml
10
ruff.toml
|
@ -7,8 +7,11 @@ lint.extend-select = [
|
||||||
"UP", # detect deprecated python stdlib stuff
|
"UP", # detect deprecated python stdlib stuff
|
||||||
"FBT", # detect use of boolean arguments
|
"FBT", # detect use of boolean arguments
|
||||||
"RUF", # various ruff-specific rules
|
"RUF", # various ruff-specific rules
|
||||||
|
"PLR", # 'refactor' rules
|
||||||
|
"B", # 'bugbear' set -- various possible bugs
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
"PLR",
|
|
||||||
# "S", # bandit (security checks) -- tends to be not very useful, lots of nitpicks
|
# "S", # bandit (security checks) -- tends to be not very useful, lots of nitpicks
|
||||||
# "DTZ", # datetimes checks -- complaining about missing tz and mostly false positives
|
# "DTZ", # datetimes checks -- complaining about missing tz and mostly false positives
|
||||||
]
|
]
|
||||||
|
@ -57,4 +60,9 @@ lint.ignore = [
|
||||||
"PLR2004", # magic value in comparison -- super annoying in tests
|
"PLR2004", # magic value in comparison -- super annoying in tests
|
||||||
###
|
###
|
||||||
"PLR0402", # import X.Y as Y -- TODO maybe consider enabling it, but double check
|
"PLR0402", # import X.Y as Y -- TODO maybe consider enabling it, but double check
|
||||||
|
|
||||||
|
"B009", # calling gettattr with constant attribute -- this is useful to convince mypy
|
||||||
|
"B010", # same as above, but setattr
|
||||||
|
"B017", # pytest.raises(Exception)
|
||||||
|
"B023", # seems to result in false positives?
|
||||||
]
|
]
|
||||||
|
|
|
@ -5,11 +5,13 @@ from more_itertools import ilen
|
||||||
|
|
||||||
def test_gdpr() -> None:
|
def test_gdpr() -> None:
|
||||||
import my.github.gdpr as gdpr
|
import my.github.gdpr as gdpr
|
||||||
|
|
||||||
assert ilen(gdpr.events()) > 100
|
assert ilen(gdpr.events()) > 100
|
||||||
|
|
||||||
|
|
||||||
def test() -> None:
|
def test() -> None:
|
||||||
from my.coding.github import get_events
|
from my.github.all import get_events
|
||||||
|
|
||||||
events = get_events()
|
events = get_events()
|
||||||
assert ilen(events) > 100
|
assert ilen(events) > 100
|
||||||
for e in events:
|
for e in events:
|
||||||
|
|
Loading…
Add table
Reference in a new issue