ruff: enable B warnings (mainly suppressed exceptions and unused variables)
parent d0df8e8f2d
commit 72cc8ff3ac

30 changed files with 83 additions and 67 deletions
@@ -153,7 +153,7 @@ def measurements() -> Iterable[Res[Measurement]]:
     oldfmt = False
     db_dt = None

-    for i, (name, tsc, temp, hum, pres, dewp) in enumerate(datas):
+    for (name, tsc, temp, hum, pres, dewp) in datas:
         if is_bad_table(name):
             continue

@@ -105,7 +105,7 @@ def dataframe() -> DataFrameT:
     rows = []
     idxs = [] # type: ignore[var-annotated]
     NO_ENDOMONDO = 'no endomondo matches'
-    for i, row in mdf.iterrows():
+    for _i, row in mdf.iterrows():
         rd = row.to_dict()
         mdate = row['date']
         if pd.isna(mdate):
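A brief aside on the loop changes in this commit (a minimal standalone sketch; the names are illustrative, not from the repo): ruff's bugbear set flags loop variables that are bound but never used, and the two accepted fixes are dropping `enumerate()` or underscore-prefixing the variable.

```python
# Minimal sketch of the unused-loop-variable fix (ruff B007). Names are illustrative.
rows = [("temp", 21.5), ("hum", 40.0)]

# Before: `i` is bound but never used, so the linter complains.
# for i, (name, value) in enumerate(rows):
#     print(name, value)

# Fix 1: drop enumerate() when the index isn't needed at all.
for name, value in rows:
    print(name, value)

# Fix 2: keep enumerate() but mark the index as intentionally unused.
for _i, (name, value) in enumerate(rows):
    print(name, value)
```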
@@ -1,9 +1,12 @@
-import warnings
+from typing import TYPE_CHECKING

-warnings.warn('my.coding.github is deprecated! Please use my.github.all instead!')
+from my.core import warnings
+
+warnings.high('my.coding.github is deprecated! Please use my.github.all instead!')
 # todo why aren't DeprecationWarning shown by default??

-from ..github.all import events, get_events
+if not TYPE_CHECKING:
+    from ..github.all import events, get_events

-# todo deprecate properly
-iter_events = events
+    # todo deprecate properly
+    iter_events = events
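For context, a rough sketch of the deprecation-shim pattern this file now uses (the standard library `warnings` module stands in for `my.core.warnings`, and the re-exported name is a placeholder): wrapping the legacy re-exports in `if not TYPE_CHECKING:` keeps them working at runtime while type checkers no longer see them, nudging callers towards the new module.

```python
# Hypothetical deprecation shim; `old_alias` and the warning text are placeholders.
import warnings
from typing import TYPE_CHECKING

warnings.warn("this.module is deprecated! Please use the new module instead!", stacklevel=2)

if not TYPE_CHECKING:
    # Runtime-only alias: code that still imports it keeps working,
    # but static type checking no longer recognises the name.
    from math import sqrt as old_alias  # noqa: F401  -- stand-in for the real re-export
```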
@@ -456,9 +456,9 @@ def _locate_functions_or_prompt(qualified_names: List[str], *, prompt: bool = Tr
         # user to select a 'data provider' like function
         try:
             mod = importlib.import_module(qualname)
-        except Exception:
+        except Exception as ie:
             eprint(f"During fallback, importing '{qualname}' as module failed")
-            raise qr_err
+            raise qr_err from ie

         # find data providers in this module
         data_providers = [f for _, f in inspect.getmembers(mod, inspect.isfunction) if is_data_provider(f)]
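Most of the exception-related edits in this commit follow the same rule (bugbear B904): when re-raising inside an `except` block, chain with `raise ... from ...` so the original cause survives, or silence the rule with `# noqa: B904` where chaining isn't wanted. A minimal sketch with made-up names:

```python
# Minimal sketch of exception chaining (B904); the function and messages are made up.
def parse_port(raw: str) -> int:
    try:
        return int(raw)
    except ValueError as e:
        # `from e` records the original ValueError as __cause__ instead of losing it
        raise RuntimeError(f"invalid port: {raw!r}") from e


try:
    parse_port("not-a-number")
except RuntimeError as err:
    assert isinstance(err.__cause__, ValueError)  # the chain is preserved explicitly
```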
@@ -2,7 +2,6 @@ from .internal import assert_subpackage; assert_subpackage(__name__)

 import logging
 import sys
-import warnings
 from contextlib import contextmanager
 from pathlib import Path
 from typing import (

@@ -20,6 +19,9 @@ from typing import (

 import appdirs # type: ignore[import-untyped]

+from . import warnings
+
+
 PathIsh = Union[str, Path] # avoid circular import from .common


@@ -116,7 +118,7 @@ def _mcachew_impl(cache_path=_cache_path_dflt, **kwargs):
     try:
         import cachew
     except ModuleNotFoundError:
-        warnings.warn('cachew library not found. You might want to install it to speed things up. See https://github.com/karlicoss/cachew')
+        warnings.high('cachew library not found. You might want to install it to speed things up. See https://github.com/karlicoss/cachew')
         return lambda orig_func: orig_func
     else:
         kwargs['cache_path'] = cache_path
@@ -1,5 +1,4 @@
 import os
-import warnings
 from glob import glob as do_glob
 from pathlib import Path
 from typing import (

@@ -15,7 +14,7 @@ from typing import (
 )

 from . import compat
-from . import warnings as core_warnings
+from . import warnings as warnings

 # some helper functions
 # TODO start deprecating this? soon we'd be able to use Path | str syntax which is shorter and more explicit

@@ -63,7 +62,7 @@ def get_files(
         gs = str(src)
         if '*' in gs:
             if glob != DEFAULT_GLOB:
-                warnings.warn(f"{caller()}: treating {gs} as glob path. Explicit glob={glob} argument is ignored!")
+                warnings.medium(f"{caller()}: treating {gs} as glob path. Explicit glob={glob} argument is ignored!")
             paths.extend(map(Path, do_glob(gs)))
         elif os.path.isdir(str(src)):
             # NOTE: we're using os.path here on purpose instead of src.is_dir

@@ -85,7 +84,7 @@ def get_files(

     if len(paths) == 0:
         # todo make it conditionally defensive based on some global settings
-        core_warnings.high(f'''
+        warnings.high(f'''
 {caller()}: no paths were matched against {pp}. This might result in missing data. Likely, the directory you passed is empty.
 '''.strip())
         # traceback is useful to figure out what config caused it?
@@ -119,7 +119,7 @@ def sort_res_by(items: Iterable[Res[T]], key: Callable[[Any], K]) -> List[Res[T]
             group = []

     results: List[Res[T]] = []
-    for v, grp in sorted(groups, key=lambda p: p[0]): # type: ignore[return-value, arg-type] # TODO SupportsLessThan??
+    for _v, grp in sorted(groups, key=lambda p: p[0]): # type: ignore[return-value, arg-type] # TODO SupportsLessThan??
         results.extend(grp)
     results.extend(group) # handle last group (it will always be errors only)

@@ -6,7 +6,7 @@ import inspect
 import os
 import re
 from types import ModuleType
-from typing import Iterator, List, Optional, TypeVar
+from typing import Iterator, List, Optional, Sequence, TypeVar

 from . import warnings

@@ -71,7 +71,7 @@ def pre_pip_dal_handler(
     name: str,
     e: ModuleNotFoundError,
     cfg,
-    requires=[],
+    requires: Sequence[str] = (),
 ) -> ModuleType:
     '''
     https://github.com/karlicoss/HPI/issues/79
@@ -25,7 +25,7 @@ def setup_config() -> None:
         warnings.warn(f"""
 'my.config' package isn't found! (expected at '{mycfg_dir}'). This is likely to result in issues.
 See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
-""".strip())
+""".strip(), stacklevel=1)
         return

     mpath = str(mycfg_dir)

@@ -47,7 +47,7 @@ See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-mo
         warnings.warn(f"""
 Importing 'my.config' failed! (error: {ex}). This is likely to result in issues.
 See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
-""")
+""", stacklevel=1)
     else:
         # defensive just in case -- __file__ may not be present if there is some dynamic magic involved
         used_config_file = getattr(my.config, '__file__', None)

@@ -63,7 +63,7 @@ See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-mo
 Expected my.config to be located at {mycfg_dir}, but instead its path is {used_config_path}.
 This will likely cause issues down the line -- double check {mycfg_dir} structure.
 See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-the-modules for more info.
-""",
+""", stacklevel=1
         )

@@ -15,7 +15,7 @@ def test() -> None:

     ## prepare exception for later
     try:
-        None.whatever # type: ignore[attr-defined]
+        None.whatever # type: ignore[attr-defined] # noqa: B018
     except Exception as e:
         ex = e
     ##
@@ -146,7 +146,7 @@ def _setup_handlers_and_formatters(name: str) -> None:
         # try colorlog first, so user gets nice colored logs
         import colorlog
     except ModuleNotFoundError:
-        warnings.warn("You might want to 'pip install colorlog' for nice colored logs")
+        warnings.warn("You might want to 'pip install colorlog' for nice colored logs", stacklevel=1)
         formatter = logging.Formatter(FORMAT_NOCOLOR)
     else:
         # log_color/reset are specific to colorlog

@@ -233,7 +233,7 @@ def get_enlighten():
     try:
         import enlighten # type: ignore[import-untyped]
     except ModuleNotFoundError:
-        warnings.warn("You might want to 'pip install enlighten' for a nice progress bar")
+        warnings.warn("You might want to 'pip install enlighten' for a nice progress bar", stacklevel=1)

         return Mock()

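The `stacklevel=1` additions here only make the default explicit, which is what bugbear B028 asks for. As a hedged illustration of why `stacklevel` matters at all (the helper below is invented):

```python
# Illustration of warnings.warn stacklevel; `load_backend` is a made-up helper.
import warnings


def load_backend():
    # stacklevel=2 attributes the warning to the caller of load_backend(),
    # which is usually more useful than pointing inside the library itself.
    warnings.warn("optional dependency missing, falling back to a slower backend", stacklevel=2)


load_backend()  # the warning is reported against this line
```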
@@ -6,7 +6,7 @@ from datetime import datetime

 def parse_org_datetime(s: str) -> datetime:
     s = s.strip('[]')
-    for fmt, cl in [
+    for fmt, _cls in [
         ("%Y-%m-%d %a %H:%M", datetime),
         ("%Y-%m-%d %H:%M" , datetime),
         # todo not sure about these... fallback on 00:00?
@@ -72,7 +72,7 @@ def locate_function(module_name: str, function_name: str) -> Callable[[], Iterab
             if func is not None and callable(func):
                 return func
     except Exception as e:
-        raise QueryException(str(e))
+        raise QueryException(str(e)) # noqa: B904
     raise QueryException(f"Could not find function '{function_name}' in '{module_name}'")

@@ -468,7 +468,7 @@ Will attempt to call iter() on the value""")
     try:
         itr: Iterator[ET] = iter(it)
     except TypeError as t:
-        raise QueryException("Could not convert input src to an Iterator: " + str(t))
+        raise QueryException("Could not convert input src to an Iterator: " + str(t)) # noqa: B904

     # if both drop_exceptions and drop_exceptions are provided for some reason,
     # should raise exceptions before dropping them
@@ -109,7 +109,7 @@ def _datelike_to_float(dl: Any) -> float:
     try:
         return parse_datetime_float(dl)
     except QueryException as q:
-        raise QueryException(f"While attempting to extract datetime from {dl}, to order by datetime:\n\n" + str(q))
+        raise QueryException(f"While attempting to extract datetime from {dl}, to order by datetime:\n\n" + str(q)) # noqa: B904


 class RangeTuple(NamedTuple):
@@ -62,7 +62,7 @@ def import_source(

 class core:
     disabled_modules = [{module_name!r}]
-""")
+""", stacklevel=1)
             # try to check if this is a config error or based on dependencies not being installed
             if isinstance(err, (ImportError, AttributeError)):
                 matched_config_err = warn_my_config_import_error(err, module_name=module_name, help_url=help_url)
@@ -440,7 +440,7 @@ def _guess_datetime(x: Any) -> Optional[datetime]:
         d = asdict(x)
     except: # noqa: E722 bare except
         return None
-    for k, v in d.items():
+    for _k, v in d.items():
         if isinstance(v, datetime):
             return v
     return None
@@ -93,11 +93,11 @@ def _discover_path_importables(pkg_pth: Path, pkg_name: str) -> Iterable[HPIModu
 def _walk_packages(path: Iterable[str], prefix: str='', onerror=None) -> Iterable[HPIModule]:
     """
     Modified version of https://github.com/python/cpython/blob/d50a0700265536a20bcce3fb108c954746d97625/Lib/pkgutil.py#L53,
-    to alvoid importing modules that are skipped
+    to avoid importing modules that are skipped
     """
     from .core_config import config

-    def seen(p, m={}):
+    def seen(p, m={}): # noqa: B006
         if p in m:
             return True
         m[p] = True
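The `# noqa: B006` above is worth a note: bugbear B006 warns about mutable default arguments because the default is created once and shared across calls. In `seen()` that sharing is the intended memoisation, so the rule is silenced rather than "fixed". A standalone sketch:

```python
# Standalone sketch of the shared mutable default that B006 warns about.
def seen(p, m={}):  # noqa: B006  -- the shared dict is deliberate: it acts as a cache
    if p in m:
        return True
    m[p] = True
    return False


assert seen("pkg.a") is False
assert seen("pkg.a") is True  # the same dict persists between calls
```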
@@ -24,6 +24,8 @@ import more_itertools
 from decorator import decorator

 from ..compat import ParamSpec
+from .. import warnings as core_warnings
+

 T = TypeVar('T')
 K = TypeVar('K')

@@ -142,8 +144,7 @@ def _warn_if_empty(func, *args, **kwargs):
     if isinstance(iterable, Sized):
         sz = len(iterable)
         if sz == 0:
-            # todo use hpi warnings here?
-            warnings.warn(f"Function {func} returned empty container, make sure your config paths are correct")
+            core_warnings.medium(f"Function {func} returned empty container, make sure your config paths are correct")
         return iterable
     else: # must be an iterator

@@ -153,7 +154,7 @@ def _warn_if_empty(func, *args, **kwargs):
             yield i
             empty = False
         if empty:
-            warnings.warn(f"Function {func} didn't emit any data, make sure your config paths are correct")
+            core_warnings.medium(f"Function {func} didn't emit any data, make sure your config paths are correct")

     return wit()

@@ -12,7 +12,7 @@ from typing import TYPE_CHECKING, Optional
 import click


-def _colorize(x: str, color: Optional[str]=None) -> str:
+def _colorize(x: str, color: Optional[str] = None) -> str:
     if color is None:
         return x

@@ -24,10 +24,10 @@ def _colorize(x: str, color: Optional[str]=None) -> str:
     return click.style(x, fg=color)


-def _warn(message: str, *args, color: Optional[str]=None, **kwargs) -> None:
+def _warn(message: str, *args, color: Optional[str] = None, **kwargs) -> None:
     stacklevel = kwargs.get('stacklevel', 1)
     kwargs['stacklevel'] = stacklevel + 2 # +1 for this function, +1 for medium/high wrapper
-    warnings.warn(_colorize(message, color=color), *args, **kwargs)
+    warnings.warn(_colorize(message, color=color), *args, **kwargs) # noqa: B028


 def low(message: str, *args, **kwargs) -> None:

@@ -55,4 +55,4 @@ if not TYPE_CHECKING:
     def warn(*args, **kwargs):
         import warnings

-        return warnings.warn(*args, **kwargs)
+        return warnings.warn(*args, **kwargs) # noqa: B028
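To make the `# noqa: B028` above less cryptic, here is a rough re-creation of the wrapper logic shown in the hunk (simplified, without the colour handling): the explicit `stacklevel` that B028 wants is in fact supplied, just via `**kwargs`, so the rule cannot see it.

```python
# Simplified severity wrapper; colour handling from the real module is omitted.
import warnings


def _warn(message: str, *args, **kwargs) -> None:
    stacklevel = kwargs.get('stacklevel', 1)
    kwargs['stacklevel'] = stacklevel + 2  # +1 for _warn, +1 for the medium/high wrapper
    warnings.warn(message, *args, **kwargs)  # noqa: B028  -- stacklevel passed via kwargs


def medium(message: str, *args, **kwargs) -> None:
    _warn(message, *args, **kwargs)


medium("something looks off")  # attributed to this line thanks to the stacklevel bump
```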
@@ -274,7 +274,7 @@ def plot() -> None:
     fig: Figure = plt.figure(figsize=(15, sleeps_count * 1))

     axarr = fig.subplots(nrows=len(sleeps))
-    for i, (sleep, axes) in enumerate(zip(sleeps, axarr)):
+    for (sleep, axes) in zip(sleeps, axarr):
         plot_one(sleep, fig, axes, showtext=True)
         used = melatonin_data.get(sleep.date_, None)
         sused: str
@@ -22,9 +22,10 @@ import geopy # type: ignore
 from my.core import stat, Stats, make_logger
 from my.core.cachew import cache_dir, mcachew

-from my.core.warnings import high
+from my.core import warnings

-high("Please set up my.google.takeout.parser module for better takeout support")
+warnings.high("Please set up my.google.takeout.parser module for better takeout support")
+

 # otherwise uses ijson

@@ -52,8 +53,7 @@ def _iter_via_ijson(fo) -> Iterable[TsLatLon]:
         # pip3 install ijson cffi
         import ijson.backends.yajl2_cffi as ijson # type: ignore
     except:
-        import warnings
-        warnings.warn("Falling back to default ijson because 'cffi' backend isn't found. It's up to 2x faster, you might want to check it out")
+        warnings.medium("Falling back to default ijson because 'cffi' backend isn't found. It's up to 2x faster, you might want to check it out")
         import ijson # type: ignore

     for d in ijson.items(fo, 'locations.item'):

@@ -105,7 +105,8 @@ def _iter_locations_fo(fit) -> Iterable[Location]:
             errors += 1
             if float(errors) / total > 0.01:
                 # todo make defensive?
-                raise RuntimeError('too many errors! aborting')
+                # todo exceptiongroup?
+                raise RuntimeError('too many errors! aborting') # noqa: B904
             else:
                 continue

@@ -22,7 +22,7 @@ def iter_movies() -> Iterator[Movie]:

     with last.open() as fo:
         reader = csv.DictReader(fo)
-        for i, line in enumerate(reader):
+        for line in reader:
             # TODO extract directors??
             title = line['Title']
             rating = int(line['You rated'])
@@ -166,7 +166,7 @@ class Loader:
         htags: List[str] = []
         if 'tags' in h:
             ht = h['tags'].zoom()
-            for k, v in list(ht.items()):
+            for _k, v in list(ht.items()):
                 ctag = v.zoom()
                 ctag['id'].consume()
                 ct = ctag['label'].zoom()

@@ -199,7 +199,7 @@ class Loader:


     def load_items(self, metas: Json) -> Iterable[Highlight]:
-        for p, meta in metas.items():
+        for _p, meta in metas.items():
             with wrap(meta, throw=not config.defensive) as meta:
                 yield from self.load_item(meta)

@@ -144,9 +144,9 @@ if not TYPE_CHECKING:
     try:
         # here we just check that types are available, we don't actually want to import them
         # fmt: off
-        dal.Subreddit
-        dal.Profile
-        dal.Multireddit
+        dal.Subreddit   # noqa: B018
+        dal.Profile     # noqa: B018
+        dal.Multireddit # noqa: B018
         # fmt: on
     except AttributeError as ae:
         warnings.high(f'{ae} : please update "rexport" installation')
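The bare `dal.X` lines above trip bugbear B018 ("useless expression"), but here they are deliberate attribute probes, hence the `# noqa: B018` markers. A self-contained approximation of the idea (using a dummy namespace instead of the real `rexport` DAL):

```python
# Stand-in for the attribute-probe pattern; `dal` here is a dummy namespace.
from types import SimpleNamespace

dal = SimpleNamespace(Subreddit=object, Profile=object, Multireddit=object)

try:
    # Bare attribute access does nothing useful by itself (that's what B018 flags),
    # but it raises AttributeError if the installed package is too old.
    dal.Subreddit    # noqa: B018
    dal.Profile      # noqa: B018
    dal.Multireddit  # noqa: B018
except AttributeError as ae:
    raise RuntimeError('please update the "rexport" installation') from ae
```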
@@ -32,7 +32,7 @@ def compute_subscriptions(*sources: Iterable[SubscriptionState]) -> List[Subscri

     by_url: Dict[str, Subscription] = {}
     # ah. dates are used for sorting
-    for when, state in sorted(states):
+    for _when, state in sorted(states):
         # TODO use 'when'?
         for feed in state:
             if feed.url not in by_url:
@@ -44,8 +44,8 @@ def _prepare_takeouts_dir(tmp_path: Path) -> Path:

     try:
         track = one(testdata().rglob('italy-slovenia-2017-07-29.json'))
-    except ValueError:
-        raise RuntimeError('testdata not found, setup git submodules?')
+    except ValueError as e:
+        raise RuntimeError('testdata not found, setup git submodules?') from e

     # todo ugh. unnecessary zipping, but at the moment takeout provider doesn't support plain dirs
     import zipfile
@@ -49,8 +49,8 @@ def _prepare_takeouts_dir(tmp_path: Path) -> Path:

     try:
         track = one(testdata().rglob('italy-slovenia-2017-07-29.json'))
-    except ValueError:
-        raise RuntimeError('testdata not found, setup git submodules?')
+    except ValueError as e:
+        raise RuntimeError('testdata not found, setup git submodules?') from e

     # todo ugh. unnecessary zipping, but at the moment takeout provider doesn't support plain dirs
     import zipfile
@@ -33,7 +33,7 @@ def default_policy() -> TzPolicy:
 def localize_with_policy(
     lfun: Callable[[datetime], datetime_aware],
     dt: datetime,
-    policy: TzPolicy=default_policy()
+    policy: TzPolicy=default_policy() # noqa: B008
 ) -> datetime_aware:
     tz = dt.tzinfo
     if tz is None:
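The `# noqa: B008` above acknowledges bugbear B008: calling a function in a default argument evaluates it once, at definition time. Here that single evaluation is the existing behaviour being kept; a sketch with invented names shows the difference:

```python
# Sketch of B008 (function call in default argument); names are invented.
from datetime import datetime, timezone


def default_policy() -> str:
    return "assume_utc"


def localize(dt: datetime, policy: str = default_policy()) -> datetime:  # noqa: B008
    # `policy` was computed once, when `localize` was defined -- not per call.
    if dt.tzinfo is None and policy == "assume_utc":
        return dt.replace(tzinfo=timezone.utc)
    return dt


print(localize(datetime(2020, 1, 1)))
```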
@@ -14,9 +14,9 @@ except ImportError as ie:
     try:
         from my.config import twitter as user_config # type: ignore[assignment]
     except ImportError:
-        raise ie # raise the original exception.. must be something else
+        raise ie # raise the original exception.. must be something else # noqa: B904
     else:
-        from ..core import warnings
+        from my.core import warnings
         warnings.high('my.config.twitter is deprecated! Please rename it to my.config.twitter_archive in your config')
     ##

ruff.toml (10 changed lines)
@@ -7,8 +7,11 @@ lint.extend-select = [
     "UP", # detect deprecated python stdlib stuff
     "FBT", # detect use of boolean arguments
     "RUF", # various ruff-specific rules
-    "PLR",
+    "PLR", # 'refactor' rules
+    "B", # 'bugbear' set -- various possible bugs
+
     # "S", # bandit (security checks) -- tends to be not very useful, lots of nitpicks
     # "DTZ", # datetimes checks -- complaining about missing tz and mostly false positives
 ]
@@ -57,4 +60,9 @@ lint.ignore = [
     "PLR2004", # magic value in comparison -- super annoying in tests
     ###
     "PLR0402", # import X.Y as Y -- TODO maybe consider enabling it, but double check
+
+    "B009", # calling gettattr with constant attribute -- this is useful to convince mypy
+    "B010", # same as above, but setattr
+    "B017", # pytest.raises(Exception)
+    "B023", # seems to result in false positives?
 ]
@@ -5,11 +5,13 @@ from more_itertools import ilen

 def test_gdpr() -> None:
     import my.github.gdpr as gdpr
+
     assert ilen(gdpr.events()) > 100


 def test() -> None:
-    from my.coding.github import get_events
+    from my.github.all import get_events
+
     events = get_events()
     assert ilen(events) > 100
     for e in events: