ruff: enable RET/PIE/PLW
parent bd1e5d2f11
commit 9fd4227abf
14 changed files with 80 additions and 75 deletions
@@ -187,8 +187,7 @@ def _repo_depends_on(_repo: Path) -> int:
         ff = _repo / pp
         if ff.exists():
             return int(ff.stat().st_mtime)
-    else:
-        raise RuntimeError(f"Could not find a FETCH_HEAD/HEAD file in {_repo}")
+    raise RuntimeError(f"Could not find a FETCH_HEAD/HEAD file in {_repo}")


 def _commits(_repos: List[Path]) -> Iterator[Commit]:
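Note: judging by the surrounding code, the else: removed above hung off the for loop over candidate paths, which is what the newly enabled PLW0120 (useless-else-on-loop) targets: without a break in the loop the else body always runs, so it can simply follow the loop. A minimal standalone sketch of the same shape (hypothetical names, not the repo's code):

from pathlib import Path
from typing import Iterable


def first_mtime(repo: Path, candidates: Iterable[str]) -> int:
    # return the mtime of the first candidate file that exists under repo
    for name in candidates:
        f = repo / name
        if f.exists():
            return int(f.stat().st_mtime)
    # previously this raise sat under `for ... else:`; since the loop has no break,
    # the else clause was redundant and the raise can follow the loop directly
    raise RuntimeError(f"none of {list(candidates)} exist under {repo}")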
@@ -43,7 +43,7 @@ def run_mypy(cfg_path: Path) -> Optional[CompletedProcess]:
     cmd = mypy_cmd()
     if cmd is None:
         return None
-    mres = run([  # noqa: UP022
+    mres = run([  # noqa: UP022,PLW1510
         *cmd,
         '--namespace-packages',
         '--color-output', # not sure if works??
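The extra PLW1510 (subprocess-run-without-check) suppression is needed because subprocess.run is called without an explicit check= argument; the rule wants callers to state how a non-zero exit code should be handled. For reference, the two explicit spellings look like this (illustrative command, not the mypy invocation from the repo):

import subprocess
import sys

# state explicitly that the caller will inspect the return code itself
res = subprocess.run([sys.executable, '--version'], capture_output=True, check=False)
print(res.returncode, res.stdout)

# or ask for CalledProcessError to be raised on a non-zero exit status
subprocess.run([sys.executable, '--version'], check=True)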
@@ -214,10 +214,10 @@ See https://github.com/karlicoss/HPI/blob/master/doc/SETUP.org#setting-up-module
     if len(errors) > 0:
         error(f'config check: {len(errors)} errors')
         return False
-    else:
-        # note: shouldn't exit here, might run something else
-        info('config check: success!')
-        return True
+
+    # note: shouldn't exit here, might run something else
+    info('config check: success!')
+    return True


 from .util import HPIModule, modules
@@ -87,7 +87,7 @@ def kopen(path: PathIsh, *args, mode: str='rt', **kwargs) -> IO:
     elif name.endswith(Ext.lz4):
         import lz4.frame  # type: ignore
         return lz4.frame.open(str(pp), mode, *args, **kwargs)
-    elif name.endswith(Ext.zstd) or name.endswith(Ext.zst):
+    elif name.endswith(Ext.zstd) or name.endswith(Ext.zst):  # noqa: PIE810
         kwargs['mode'] = mode
         return _zstd_open(pp, *args, **kwargs)
     elif name.endswith(Ext.targz):
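PIE810 asks for repeated startswith/endswith calls on the same value to be merged into a single call with a tuple of suffixes; here it is suppressed with noqa, presumably to keep the two zstandard extensions spelled out separately. The rewrite the rule would otherwise suggest looks roughly like this (hypothetical literal suffixes):

name = 'export.tar.zst'

# flagged by PIE810: two separate endswith() calls on the same string
if name.endswith('.zstd') or name.endswith('.zst'):
    print('zstandard archive')

# what the rule prefers: str.endswith() accepts a tuple of suffixes
if name.endswith(('.zstd', '.zst')):
    print('zstandard archive')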
@@ -41,8 +41,7 @@ def notnone(x: Optional[T]) -> T:
 def unwrap(res: Res[T]) -> T:
     if isinstance(res, Exception):
         raise res
-    else:
-        return res
+    return res


 def drop_exceptions(itr: Iterator[Res[T]]) -> Iterator[T]:
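This is the superfluous else-after-raise shape (RET506 in the newly enabled RET set): once the if branch raises, the else: only adds indentation. A self-contained sketch of the same unwrap idiom:

from typing import TypeVar, Union

T = TypeVar('T')
Res = Union[T, Exception]


def unwrap(res: Res[T]) -> T:
    if isinstance(res, Exception):
        raise res
    # no else needed: control only reaches this point when res is not an Exception
    return res


assert unwrap(42) == 42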
@@ -17,8 +17,7 @@ def parse_org_datetime(s: str) -> datetime:
             return datetime.strptime(s, fmt)
         except ValueError:
             continue
-    else:
-        raise RuntimeError(f"Bad datetime string {s}")
+    raise RuntimeError(f"Bad datetime string {s}")


 # TODO I guess want to borrow inspiration from bs4? element type <-> tag; and similar logic for find_one, find_all
@@ -341,37 +341,37 @@ def select_range(
         if order_by_chosen is None:
             raise QueryException("""Can't order by range if we have no way to order_by!
 Specify a type or a key to order the value by""")
-        else:
-            # force drop_unsorted=True so we can use _create_range_filter
-            # sort the iterable by the generated order_by_chosen function
-            itr = select(itr, order_by=order_by_chosen, drop_unsorted=True)
-            filter_func: Optional[Where]
-            if order_by_value_type in [datetime, date]:
-                filter_func = _create_range_filter(
-                    unparsed_range=unparsed_range,
-                    end_parser=parse_datetime_float,
-                    within_parser=parse_timedelta_float,
-                    attr_func=order_by_chosen,  # type: ignore[arg-type]
-                    default_before=time.time(),
-                    value_coercion_func=_datelike_to_float)
-            elif order_by_value_type in [int, float]:
-                # allow primitives to be converted using the default int(), float() callables
-                filter_func = _create_range_filter(
-                    unparsed_range=unparsed_range,
-                    end_parser=order_by_value_type,
-                    within_parser=order_by_value_type,
-                    attr_func=order_by_chosen,  # type: ignore[arg-type]
-                    default_before=None,
-                    value_coercion_func=order_by_value_type)
-            else:
-                # TODO: add additional kwargs to let the user sort by other values, by specifying the parsers?
-                # would need to allow passing the end_parser, within parser, default before and value_coercion_func...
-                # (seems like a lot?)
-                raise QueryException("Sorting by custom types is currently unsupported")
-
-            # use the created filter function
-            # we've already applied drop_exceptions and kwargs related to unsortable values above
-            itr = select(itr, where=filter_func, limit=limit, reverse=reverse)
+
+        # force drop_unsorted=True so we can use _create_range_filter
+        # sort the iterable by the generated order_by_chosen function
+        itr = select(itr, order_by=order_by_chosen, drop_unsorted=True)
+        filter_func: Optional[Where]
+        if order_by_value_type in [datetime, date]:
+            filter_func = _create_range_filter(
+                unparsed_range=unparsed_range,
+                end_parser=parse_datetime_float,
+                within_parser=parse_timedelta_float,
+                attr_func=order_by_chosen,  # type: ignore[arg-type]
+                default_before=time.time(),
+                value_coercion_func=_datelike_to_float)
+        elif order_by_value_type in [int, float]:
+            # allow primitives to be converted using the default int(), float() callables
+            filter_func = _create_range_filter(
+                unparsed_range=unparsed_range,
+                end_parser=order_by_value_type,
+                within_parser=order_by_value_type,
+                attr_func=order_by_chosen,  # type: ignore[arg-type]
+                default_before=None,
+                value_coercion_func=order_by_value_type)
+        else:
+            # TODO: add additional kwargs to let the user sort by other values, by specifying the parsers?
+            # would need to allow passing the end_parser, within parser, default before and value_coercion_func...
+            # (seems like a lot?)
+            raise QueryException("Sorting by custom types is currently unsupported")
+
+        # use the created filter function
+        # we've already applied drop_exceptions and kwargs related to unsortable values above
+        itr = select(itr, where=filter_func, limit=limit, reverse=reverse)
     else:
         # wrap_unsorted may be used here if the user specified an order_key,
         # or manually passed a order_value function
@@ -145,8 +145,7 @@ def _dumps_factory(**kwargs) -> Callable[[Any], str]:
         res = factory()
         if res is not None:
             return res
-    else:
-        raise RuntimeError("Should not happen!")
+    raise RuntimeError("Should not happen!")


 def dumps(
@@ -100,7 +100,7 @@ def _walk_packages(path: Iterable[str], prefix: str='', onerror=None) -> Iterabl
     def seen(p, m={}):  # noqa: B006
         if p in m:
             return True
-        m[p] = True
+        m[p] = True  # noqa: RET503

     for info in pkgutil.iter_modules(path, prefix):
         mname = info.name
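RET503 (implicit-return) requires an explicit return on every exit path of a function that returns a value elsewhere; the seen() helper above deliberately falls through (producing None, which is falsy) and gets a noqa instead of a rewrite. A sketch of what the rule would otherwise ask for (hypothetical helper of the same shape):

def seen_before(key, _cache={}):  # noqa: B006
    # return True if key was passed on a previous call; the shared default dict is intentional
    if key in _cache:
        return True
    _cache[key] = True
    # falling through here would implicitly return None, which RET503 flags;
    # the explicit return makes the intended falsy result visible
    return False


print(seen_before('a'), seen_before('a'))  # -> False True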
@@ -35,7 +35,7 @@ def is_empty(x) -> bool:
     elif isinstance(x, list):
         return all(map(is_empty, x))
     else:
-        assert_never(x)
+        assert_never(x)  # noqa: RET503


 class Manager:
@@ -92,13 +92,12 @@ def estimate_location(dt: DateExact) -> Iterator[FallbackLocation]:
             dt=datetime.fromtimestamp(d, timezone.utc),
             datasource='via_home')
         return
-    else:
-        # I guess the most reasonable is to fallback on the first location
-        lat, lon = hist[-1][1]
-        yield FallbackLocation(
-            lat=lat,
-            lon=lon,
-            accuracy=config.home_accuracy,
-            dt=datetime.fromtimestamp(d, timezone.utc),
-            datasource='via_home')
-        return
+
+    # I guess the most reasonable is to fallback on the first location
+    lat, lon = hist[-1][1]
+    yield FallbackLocation(
+        lat=lat,
+        lon=lon,
+        accuracy=config.home_accuracy,
+        dt=datetime.fromtimestamp(d, timezone.utc),
+        datasource='via_home')
@@ -43,8 +43,7 @@ class Photo(NamedTuple):
         for bp in config.paths:
             if self.path.startswith(bp):
                 return self.path[len(bp):]
-        else:
-            raise RuntimeError(f"Weird path {self.path}, can't match against anything")
+        raise RuntimeError(f"Weird path {self.path}, can't match against anything")

     @property
     def name(self) -> str:
@@ -182,10 +182,9 @@ class MMS(NamedTuple):
         for (addr, _type) in self.addresses:
             if _type == 137:
                 return addr
-        else:
-            # hmm, maybe return instead? but this probably shouldnt happen, means
-            # something is very broken
-            raise RuntimeError(f'No from address matching 137 found in {self.addresses}')
+        # hmm, maybe return instead? but this probably shouldnt happen, means
+        # something is very broken
+        raise RuntimeError(f'No from address matching 137 found in {self.addresses}')

     @property
     def from_me(self) -> bool:
@@ -63,16 +63,14 @@ def _get_user_config():
     except ImportError as ie:
         if "'time'" not in str(ie):
             raise ie
-        else:
-            return empty_config
+        return empty_config

     try:
         user_config = time.tz.via_location
     except AttributeError as ae:
         if not ("'tz'" in str(ae) or "'via_location'" in str(ae)):
             raise ae
-        else:
-            return empty_config
+        return empty_config

     return user_config

ruff.toml
@@ -1,18 +1,22 @@
 target-version = "py38" # NOTE: inferred from pyproject.toml if present

 lint.extend-select = [
     "F", # flakes rules -- default, but extend just in case
     "E", # pycodestyle -- default, but extend just in case
     "C4", # flake8-comprehensions -- unnecessary list/map/dict calls
     "UP", # detect deprecated python stdlib stuff
     "FBT", # detect use of boolean arguments
     "RUF", # various ruff-specific rules
     "PLR", # 'refactor' rules
     "B", # 'bugbear' set -- various possible bugs
-
     "PERF", # various potential performance speedups
+    "RET", # early returns
+    "PIE", # 'misc' lints
+    "PLW", # pylint warnings
     # "FA", # TODO enable later after we make sure cachew works?
+    # "PTH", # pathlib migration -- TODO enable later
     # "ARG", # TODO useful, but results in some false positives in pytest fixtures... maybe later
+    # "A", # TODO builtin shadowing -- handle later
     # "S", # bandit (security checks) -- tends to be not very useful, lots of nitpicks
     # "DTZ", # datetimes checks -- complaining about missing tz and mostly false positives
 ]
@@ -67,6 +71,10 @@ lint.ignore = [
     "B017", # pytest.raises(Exception)
     "B023", # seems to result in false positives?

+    # complains about useless pass, but has sort of a false positive if the function has a docstring?
+    # this is common for click entrypoints (e.g. in __main__), so disable
+    "PIE790",
+
     # a bit too annoying, offers to convert for loops to list comprehension
     # , which may heart readability
     "PERF401",
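The PIE790 ignore above covers the useless-pass check: a function whose body is only a docstring plus pass (a common shape for click group entrypoints like the one in __main__) trips the rule even though the pass is harmless. A sketch of that shape (hypothetical command group, not the repo's actual CLI):

import click


@click.group()
def main() -> None:
    '''Parent group; subcommands hang off this entrypoint.'''
    pass  # PIE790 flags this as unnecessary because the docstring already forms the body


@main.command()
def version() -> None:
    '''Print a version string.'''
    click.echo('0.1')


if __name__ == '__main__':
    main()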
@@ -74,4 +82,10 @@ lint.ignore = [
     # suggests no using exception in for loops
     # we do use this technique a lot, plus in 3.11 happy path exception handling is "zero-cost"
     "PERF203",
+
+    "RET504", # unnecessary assignment before returning -- that can be useful for readability
+    "RET505", # unnecessary else after return -- can hurt readability
+
+    "PLW0603", # global variable update.. we usually know why we are doing this
+    "PLW2901", # for loop variable overwritten, usually this is intentional
 ]
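Of the newly ignored rules, RET504 (unnecessary assignment before return) is the one most likely to fire on ordinary code; it is kept off because, as the comment says, naming the intermediate result can document intent. A small illustration of the trade-off (hypothetical function):

from typing import List


def mean(xs: List[float]) -> float:
    # RET504 would suggest returning the expression directly, but the named
    # intermediate can be easier to scan and to inspect in a debugger
    result = sum(xs) / len(xs)
    return result


print(mean([1.0, 2.0, 3.0]))  # -> 2.0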