From b594377a599acbc14eb9a2b17e88ca87fcc1a727 Mon Sep 17 00:00:54 2001
From: Dima Gerasimov
Date: Wed, 28 Aug 2024 00:00:54 +0100
Subject: [PATCH] ruff: enable RUF ruleset

---
 my/arbtt.py                            |  2 +-
 my/bluemaestro.py                      |  4 ++--
 my/core/__main__.py                    |  2 +-
 my/core/error.py                       |  4 ++--
 my/core/source.py                      |  2 +-
 my/core/structure.py                   | 22 +++++++++++-----------
 my/core/time.py                        |  2 +-
 my/core/util.py                        |  2 +-
 my/experimental/destructive_parsing.py |  2 +-
 my/google/takeout/html.py              |  2 +-
 my/location/common.py                  |  2 +-
 my/photos/main.py                      |  2 +-
 my/polar.py                            |  2 +-
 my/twitter/android.py                  |  5 +++--
 ruff.toml                              |  3 +++
 15 files changed, 31 insertions(+), 27 deletions(-)

diff --git a/my/arbtt.py b/my/arbtt.py
index 6de8cb2..2bcf291 100644
--- a/my/arbtt.py
+++ b/my/arbtt.py
@@ -81,7 +81,7 @@ def entries() -> Iterable[Entry]:
         cmds = [base] # rely on default
     else:
         # otherwise, 'merge' them
-        cmds = [base + ['--logfile', f] for f in inps]
+        cmds = [[*base, '--logfile', f] for f in inps]
 
     import ijson.backends.yajl2_cffi as ijson # type: ignore
     from subprocess import Popen, PIPE
diff --git a/my/bluemaestro.py b/my/bluemaestro.py
index 12c114f..50338bb 100644
--- a/my/bluemaestro.py
+++ b/my/bluemaestro.py
@@ -104,7 +104,7 @@ def measurements() -> Iterable[Res[Measurement]]:
                     f'SELECT "{path.name}" as name, Time, Temperature, Humidity, Pressure, Dewpoint FROM data ORDER BY log_index'
                 )
                 oldfmt = True
-                db_dts = list(db.execute('SELECT last_download FROM info'))[0][0]
+                [(db_dts,)] = db.execute('SELECT last_download FROM info')
                 if db_dts == 'N/A':
                     # ??? happens for 20180923-20180928
                     continue
@@ -137,7 +137,7 @@ def measurements() -> Iterable[Res[Measurement]]:
                 processed_tables |= set(log_tables)
 
                 # todo use later?
-                frequencies = [list(db.execute(f'SELECT interval from {t.replace("_log", "_meta")}'))[0][0] for t in log_tables]
+                frequencies = [list(db.execute(f'SELECT interval from {t.replace("_log", "_meta")}'))[0][0] for t in log_tables]  # noqa: RUF015
 
                 # todo could just filter out the older datapoints?? dunno.
diff --git a/my/core/__main__.py b/my/core/__main__.py
index d3c0cc7..3af8e08 100644
--- a/my/core/__main__.py
+++ b/my/core/__main__.py
@@ -392,7 +392,7 @@ def module_install(*, user: bool, module: Sequence[str], parallel: bool=False, b
         # I think it only helps for pypi artifacts (not git!),
         # and only if they weren't cached
         for r in requirements:
-            cmds.append(pre_cmd + [r])
+            cmds.append([*pre_cmd, r])
     else:
         if parallel:
             warning('parallel install is not supported on this platform, installing sequentially...')
diff --git a/my/core/error.py b/my/core/error.py
index 7489f69..cd8d093 100644
--- a/my/core/error.py
+++ b/my/core/error.py
@@ -153,7 +153,7 @@ def test_sort_res_by() -> None:
         Exc('last'),
     ]
 
-    results2 = sort_res_by(ress + [0], lambda x: int(x))
+    results2 = sort_res_by([*ress, 0], lambda x: int(x))
     assert results2 == [Exc('last'), 0] + results[:-1]
 
     assert sort_res_by(['caba', 'a', 'aba', 'daba'], key=lambda x: len(x)) == ['a', 'aba', 'caba', 'daba']
@@ -166,7 +166,7 @@ def test_sort_res_by() -> None:
 def set_error_datetime(e: Exception, dt: Optional[datetime]) -> None:
     if dt is None:
         return
-    e.args = e.args + (dt,)
+    e.args = (*e.args, dt)
     # todo not sure if should return new exception?
diff --git a/my/core/source.py b/my/core/source.py
index 6e0a78a..4510ef0 100644
--- a/my/core/source.py
+++ b/my/core/source.py
@@ -61,7 +61,7 @@ def import_source(
             warnings.warn(f"""If you don't want to use this module, to hide this message, add '{module_name}' to your core config disabled_modules in your config, like:

class core:
-    disabled_modules = [{repr(module_name)}]
+    disabled_modules = [{module_name!r}]
 """)
             # try to check if this is a config error or based on dependencies not being installed
             if isinstance(err, (ImportError, AttributeError)):
diff --git a/my/core/structure.py b/my/core/structure.py
index 149a22a..be5b307 100644
--- a/my/core/structure.py
+++ b/my/core/structure.py
@@ -67,21 +67,21 @@ def match_structure(
     export_dir
     ├── exp_2020
-    │   ├── channel_data
-    │   │   ├── data1
-    │   │   └── data2
-    │   ├── index.json
-    │   ├── messages
-    │   │   └── messages.csv
-    │   └── profile
-    │       └── settings.json
+    │   ├── channel_data
+    │   │   ├── data1
+    │   │   └── data2
+    │   ├── index.json
+    │   ├── messages
+    │   │   └── messages.csv
+    │   └── profile
+    │       └── settings.json
     └── exp_2021
         ├── channel_data
-        │   ├── data1
-        │   └── data2
+        │   ├── data1
+        │   └── data2
         ├── index.json
         ├── messages
-        │   └── messages.csv
+        │   └── messages.csv
        └── profile
            └── settings.json
diff --git a/my/core/time.py b/my/core/time.py
index 83a407b..5a47c3d 100644
--- a/my/core/time.py
+++ b/my/core/time.py
@@ -21,7 +21,7 @@ def user_forced() -> Sequence[str]:
 def _abbr_to_timezone_map() -> Dict[str, pytz.BaseTzInfo]:
     # also force UTC to always correspond to utc
     # this makes more sense than Zulu it ends up by default
-    timezones = pytz.all_timezones + ['UTC'] + list(user_forced())
+    timezones = [*pytz.all_timezones, 'UTC', *user_forced()]
 
     res: Dict[str, pytz.BaseTzInfo] = {}
     for tzname in timezones:
diff --git a/my/core/util.py b/my/core/util.py
index fdd10f9..b49acf6 100644
--- a/my/core/util.py
+++ b/my/core/util.py
@@ -74,7 +74,7 @@ def _discover_path_importables(pkg_pth: Path, pkg_name: str) -> Iterable[HPIModu
             continue
 
         rel_pt = pkg_dir_path.relative_to(pkg_pth)
-        pkg_pref = '.'.join((pkg_name, ) + rel_pt.parts)
+        pkg_pref = '.'.join((pkg_name, *rel_pt.parts))
         yield from _walk_packages(
             (str(pkg_dir_path), ),
             prefix=f'{pkg_pref}.',
diff --git a/my/experimental/destructive_parsing.py b/my/experimental/destructive_parsing.py
index 05c5920..056cc0b 100644
--- a/my/experimental/destructive_parsing.py
+++ b/my/experimental/destructive_parsing.py
@@ -26,7 +26,7 @@ class Helper:
         assert actual == expected, (key, actual, expected)
 
     def zoom(self, key: str) -> 'Helper':
-        return self.manager.helper(item=self.item.pop(key), path=self.path + (key,))
+        return self.manager.helper(item=self.item.pop(key), path=(*self.path, key))
 
 
 def is_empty(x) -> bool:
diff --git a/my/google/takeout/html.py b/my/google/takeout/html.py
index 3ce692c..750beac 100644
--- a/my/google/takeout/html.py
+++ b/my/google/takeout/html.py
@@ -122,7 +122,7 @@ class TakeoutHTMLParser(HTMLParser):
     # JamiexxVEVO
     # Jun 21, 2018, 5:48:34 AM
     # Products:
-    #  YouTube
+    # YouTube
    def handle_data(self, data):
        if self.state == State.OUTSIDE:
            if data[:-1].strip() in ("Watched", "Visited"):
diff --git a/my/location/common.py b/my/location/common.py
index 510e005..f406370 100644
--- a/my/location/common.py
+++ b/my/location/common.py
@@ -70,7 +70,7 @@ def locations_to_gpx(locations: Iterable[LocationProtocol], buffer: TextIO) -> I
             )
         except AttributeError:
             yield TypeError(
-                f"Expected a Location or Location-like object, got {type(location)} {repr(location)}"
+                f"Expected a Location or Location-like object, got {type(location)} {location!r}"
             )
             continue
         gpx_segment.points.append(point)
diff --git a/my/photos/main.py b/my/photos/main.py
index 6262eac..63a6fea 100644
--- a/my/photos/main.py
+++ b/my/photos/main.py
@@ -209,7 +209,7 @@ def print_all() -> None:
         if isinstance(p, Exception):
             print('ERROR!', p)
         else:
-            print(f"{str(p.dt):25} {p.path} {p.geo}")
+            print(f"{p.dt!s:25} {p.path} {p.geo}")
 
 # todo cachew -- improve AttributeError: type object 'tuple' has no attribute '__annotations__' -- improve errors?
 # todo cachew -- invalidate if function code changed?
diff --git a/my/polar.py b/my/polar.py
index cd2c719..197de18 100644
--- a/my/polar.py
+++ b/my/polar.py
@@ -27,7 +27,7 @@ class polar(user_config):
     '''
     Polar config is optional, you only need it if you want to specify custom 'polar_dir'
     '''
-    polar_dir: PathIsh = Path('~/.polar').expanduser()
+    polar_dir: PathIsh = Path('~/.polar').expanduser()  # noqa: RUF009
     defensive: bool = True # pass False if you want it to fail faster on errors (useful for debugging)
diff --git a/my/twitter/android.py b/my/twitter/android.py
index f40ad0e..7adfeb6 100644
--- a/my/twitter/android.py
+++ b/my/twitter/android.py
@@ -155,7 +155,7 @@ _SELECT_OWN_TWEETS = '_SELECT_OWN_TWEETS'
 def get_own_user_id(conn) -> str:
     # unclear what's the reliable way to query it, so we use multiple different ones and arbitrate
     # NOTE: 'SELECT DISTINCT ev_owner_id FROM lists' doesn't work, might include lists from other people?
-    res = set()
+    res: Set[str] = set()
     for q in [
         'SELECT DISTINCT list_mapping_user_id FROM list_mapping',
         'SELECT DISTINCT owner_id FROM cursors',
@@ -164,7 +164,8 @@ def get_own_user_id(conn) -> str:
         for (r,) in conn.execute(q):
             res.add(r)
     assert len(res) == 1, res
-    return str(list(res)[0])
+    [r] = res
+    return r
 
 
 # NOTE:
diff --git a/ruff.toml b/ruff.toml
index a8af399..2b77622 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -6,6 +6,7 @@ lint.extend-select = [
     "C4",  # flake8-comprehensions -- unnecessary list/map/dict calls
     "UP",  # detect deprecated python stdlib stuff
     "FBT", # detect use of boolean arguments
+    "RUF", # various ruff-specific rules
 ]
 
 lint.ignore = [
@@ -38,4 +39,6 @@ lint.ignore = [
     "UP006", # use type instead of Type
     "UP007", # use X | Y instead of Union
     ###
+    "RUF100", # unused noqa -- handle later
+    "RUF012", # mutable class attrs should be annotated with ClassVar... ugh pretty annoying for user configs
 ]
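
For reference, below is a minimal, self-contained sketch of the RUF patterns this patch exercises: RUF005 (unpack instead of concatenating onto a literal), RUF010 (explicit f-string conversion flags), RUF015 (avoid building a list just to take element [0]), and RUF012 (annotate mutable class attributes with ClassVar). The names and values are illustrative stand-ins, not code from this repo.

# Illustrative only -- stand-in names, not code from this repo.
from dataclasses import dataclass, field
from typing import ClassVar, List

base = ['arbtt-dump', '--format', 'json']
inps = ['a.log', 'b.log']

# RUF005: prefer iterable unpacking over concatenating onto a list literal.
cmds = [[*base, '--logfile', f] for f in inps]  # rather than base + ['--logfile', f]

# RUF010: prefer conversion flags (!r, !s) over str()/repr() calls inside f-strings.
module_name = 'my.arbtt'
msg = f"disabled_modules = [{module_name!r}]"   # rather than {repr(module_name)}

# RUF015: prefer unpacking (or next(iter(...))) over list(...)[0] for a single row.
rows = [('2018-09-23',)]  # stand-in for a db.execute(...) cursor
[(first,)] = rows          # rather than list(rows)[0][0]

# RUF012: mutable class attributes should be annotated with ClassVar
# (ignored in ruff.toml above since it is annoying for HPI-style user configs).
@dataclass
class Example:
    shared: ClassVar[List[str]] = ['--format', 'json']     # class-level, shared
    per_instance: List[str] = field(default_factory=list)  # per-instance mutable default

print(cmds, msg, first, Example().per_instance)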