general: update mypy config, seems that lots of type: ignore aren't necessary anymore
commit fe2c99f037
parent 8fc4951c57
52 changed files with 142 additions and 105 deletions
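Note: the mypy configuration change itself -- the part of this commit that makes the ignore comments below redundant -- is among the 52 changed files but is not visible in this excerpt. Purely as an illustrative sketch (not the actual file from this commit), the kind of settings such a cleanup typically relies on looks like:

    [mypy]
    # untyped third-party modules (appdirs, lz4, dateparser, ...) stop requiring per-import ignores
    ignore_missing_imports = True
    # 'my' is a namespace package, so accessing my.__path__ stops triggering attr-defined errors
    namespace_packages = True
    # report any '# type: ignore' comment that is no longer needed
    warn_unused_ignores = True

With warn_unused_ignores enabled (or simply after upgrading mypy), the checker itself points out which of the remaining ignores can be dropped.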
@@ -135,7 +135,7 @@ def config_ok() -> bool:
     # at this point 'my' should already be imported, so doesn't hurt to extract paths from it
     import my
     try:
-        paths: List[str] = list(my.__path__) # type: ignore[attr-defined]
+        paths: List[str] = list(my.__path__)
     except Exception as e:
         errors.append(e)
         error('failed to determine module import path')
@@ -152,7 +152,7 @@ def config_ok() -> bool:
     ## check we're not using stub config
     import my.core
     try:
-        core_pkg_path = str(Path(my.core.__path__[0]).parent) # type: ignore[attr-defined]
+        core_pkg_path = str(Path(my.core.__path__[0]).parent)
         if str(cfg_path).startswith(core_pkg_path):
             error(f'''
 Seems that the stub config is used ({cfg_path}). This is likely not going to work.

@@ -30,7 +30,7 @@ def disabled_cachew() -> Iterator[None]:


 def _appdirs_cache_dir() -> Path:
-    import appdirs # type: ignore
+    import appdirs
     cd = Path(appdirs.user_cache_dir('my'))
     cd.mkdir(exist_ok=True, parents=True)
     return cd

@@ -21,7 +21,7 @@ def make_config(cls: Type[C], migration: Callable[[Attrs], Attrs]=lambda x: x) -
         if k in {f.name for f in fields(cls)} # type: ignore[arg-type] # see https://github.com/python/typing_extensions/issues/115
     }
     # todo maybe return type here?
-    return cls(**params) # type: ignore[call-arg]
+    return cls(**params)


 F = TypeVar('F')

@@ -3,6 +3,7 @@ from pathlib import Path
 from datetime import datetime
 import functools
 from contextlib import contextmanager
+import sys
 import types
 from typing import Union, Callable, Dict, Iterable, TypeVar, Sequence, List, Optional, Any, cast, Tuple, TYPE_CHECKING, NoReturn
 import warnings
@@ -21,13 +22,12 @@ def import_file(p: PathIsh, name: Optional[str] = None) -> types.ModuleType:
     assert spec is not None, f"Fatal error; Could not create module spec from {name} {p}"
     foo = importlib.util.module_from_spec(spec)
     loader = spec.loader; assert loader is not None
-    loader.exec_module(foo) # type: ignore[attr-defined]
+    loader.exec_module(foo)
     return foo


 def import_from(path: PathIsh, name: str) -> types.ModuleType:
     path = str(path)
-    import sys
     try:
         sys.path.append(path)
         import importlib
@@ -94,7 +94,7 @@ def ensure_unique(


 def test_ensure_unique() -> None:
-    import pytest # type: ignore
+    import pytest
     assert list(ensure_unique([1, 2, 3], key=lambda i: i)) == [1, 2, 3]

     dups = [1, 2, 1, 4]
@@ -432,7 +432,7 @@ def warn_if_empty(f):
     def wrapped(*args, **kwargs):
         res = f(*args, **kwargs)
         return _warn_iterable(res, f=f)
-    return wrapped # type: ignore
+    return wrapped


 # global state that turns on/off quick stats
@@ -620,6 +620,10 @@ def assert_subpackage(name: str) -> None:
     assert name == '__main__' or 'my.core' in name, f'Expected module __name__ ({name}) to be __main__ or start with my.core'


+from .compat import ParamSpec
+_P = ParamSpec('_P')
+_T = TypeVar('_T')
+
 # https://stackoverflow.com/a/10436851/706389
 from concurrent.futures import Future, Executor
 class DummyExecutor(Executor):
@@ -627,26 +631,31 @@ class DummyExecutor(Executor):
         self._shutdown = False
         self._max_workers = max_workers

-    # TODO: once support for 3.7 is dropped,
-    # can make 'fn' a positional only parameter,
-    # which fixes the mypy error this throws without the type: ignore
-    def submit(self, fn, *args, **kwargs) -> Future: # type: ignore[override]
-        if self._shutdown:
-            raise RuntimeError('cannot schedule new futures after shutdown')
-
-        f: Future[Any] = Future()
-        try:
-            result = fn(*args, **kwargs)
-        except KeyboardInterrupt:
-            raise
-        except BaseException as e:
-            f.set_exception(e)
-        else:
-            f.set_result(result)
-
-        return f
-
-    def shutdown(self, wait: bool=True) -> None: # type: ignore[override]
+    if TYPE_CHECKING:
+        if sys.version_info[:2] <= (3, 8):
+            # 3.8 doesn't support ParamSpec as Callable arg :(
+            # and any attempt to type results in incompatible supertype.. so whatever
+            def submit(self, fn, *args, **kwargs): ...
+        else:
+            def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ...
+    else:
+        def submit(self, fn, *args, **kwargs):
+            if self._shutdown:
+                raise RuntimeError('cannot schedule new futures after shutdown')
+
+            f: Future[Any] = Future()
+            try:
+                result = fn(*args, **kwargs)
+            except KeyboardInterrupt:
+                raise
+            except BaseException as e:
+                f.set_exception(e)
+            else:
+                f.set_result(result)
+
+            return f
+
+    def shutdown(self, wait: bool=True, **kwargs) -> None:
         self._shutdown = True
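The hunk above illustrates the approach this commit takes instead of silencing mypy with '# type: ignore[override]': the precisely typed submit only exists for the type checker (gated on TYPE_CHECKING and the Python version), while the runtime implementation keeps its permissive signature. A minimal self-contained sketch of the same pattern -- InlineExecutor is a hypothetical name, not code from this commit, and it assumes Python 3.10+ or typing_extensions for ParamSpec:

    import sys
    from concurrent.futures import Executor, Future
    from typing import TYPE_CHECKING, Callable, TypeVar

    if sys.version_info[:2] >= (3, 10):
        from typing import ParamSpec
    else:
        from typing_extensions import ParamSpec

    _P = ParamSpec('_P')
    _T = TypeVar('_T')

    class InlineExecutor(Executor):
        # runs the callable immediately in the calling thread (illustrative only)
        if TYPE_CHECKING:
            # signature matches Executor.submit, so no ignore[override] is needed
            def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ...
        else:
            def submit(self, fn, *args, **kwargs):
                f = Future()
                try:
                    f.set_result(fn(*args, **kwargs))
                except BaseException as e:
                    f.set_exception(e)
                return f

For instance, InlineExecutor().submit(len, 'abc').result() evaluates to 3, and mypy infers Future[int] for the submit call.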
@@ -86,7 +86,7 @@ else:

     def cached_property(f: Callable[[Cl], R]) -> R:
         import functools
-        return property(functools.lru_cache(maxsize=1)(f)) # type: ignore
+        return property(functools.lru_cache(maxsize=1)(f))
     del Cl
     del R

@@ -111,7 +111,7 @@ if sys.version_info[:2] >= (3, 8):
     from typing import Protocol
 else:
     if TYPE_CHECKING:
-        from typing_extensions import Protocol # type: ignore[misc]
+        from typing_extensions import Protocol
     else:
         # todo could also use NamedTuple?
         Protocol = object
@@ -121,12 +121,29 @@ if sys.version_info[:2] >= (3, 8):
     from typing import TypedDict
 else:
     if TYPE_CHECKING:
-        from typing_extensions import TypedDict # type: ignore[misc]
+        from typing_extensions import TypedDict
     else:
         from typing import Dict
         TypedDict = Dict


+if sys.version_info[:2] >= (3, 10):
+    from typing import ParamSpec
+else:
+    if TYPE_CHECKING:
+        from typing_extensions import ParamSpec
+    else:
+        from typing import NamedTuple, Any
+        # erm.. I guess as long as it's not crashing, whatever...
+        class _ParamSpec:
+            def __call__(self, args):
+                class _res:
+                    args = None
+                    kwargs = None
+                return _res
+        ParamSpec = _ParamSpec()
+
+
 # bisect_left doesn't have a 'key' parameter (which we use)
 # till python3.10
 if sys.version_info[:2] <= (3, 9):
@@ -156,4 +173,4 @@
             hi = mid
         return lo
 else:
-    from bisect import bisect_left # type: ignore[misc]
+    from bisect import bisect_left

@@ -7,16 +7,16 @@ from typing import Sequence, Optional
 from . import warnings, PathIsh, Path

 try:
     from my.config import core as user_config # type: ignore[attr-defined]
 except Exception as e:
     try:
-        from my.config import common as user_config # type: ignore[attr-defined, assignment, misc]
+        from my.config import common as user_config # type: ignore[attr-defined]
         warnings.high("'common' config section is deprecated. Please rename it to 'core'.")
     except Exception as e2:
         # make it defensive, because it's pretty commonly used and would be annoying if it breaks hpi doctor etc.
         # this way it'll at least use the defaults
         # todo actually not sure if needs a warning? Perhaps it's okay without it, because the defaults are reasonable enough
-        user_config = object # type: ignore[assignment, misc]
+        user_config = object


 _HPI_CACHE_DIR_DEFAULT = ''

@@ -144,7 +144,7 @@ def all_modules() -> Iterable[HPIModule]:
 def _iter_my_roots() -> Iterable[Path]:
     import my # doesn't import any code, because of namespace package

-    paths: List[str] = list(my.__path__) # type: ignore[attr-defined]
+    paths: List[str] = list(my.__path__)
     if len(paths) == 0:
         # should probably never happen?, if this code is running, it was imported
         # because something was added to __path__ to match this name

@@ -125,7 +125,7 @@ def test_sort_res_by() -> None:
         1,
         Exc('last'),
     ]
-    results = sort_res_by(ress, lambda x: int(x)) # type: ignore
+    results = sort_res_by(ress, lambda x: int(x))
     assert results == [
         1,
         'bad',
@@ -137,11 +137,11 @@ def test_sort_res_by() -> None:
         Exc('last'),
     ]

-    results2 = sort_res_by(ress + [0], lambda x: int(x)) # type: ignore
+    results2 = sort_res_by(ress + [0], lambda x: int(x))
     assert results2 == [Exc('last'), 0] + results[:-1]

     assert sort_res_by(['caba', 'a', 'aba', 'daba'], key=lambda x: len(x)) == ['a', 'aba', 'caba', 'daba']
-    assert sort_res_by([], key=lambda x: x) == [] # type: ignore
+    assert sort_res_by([], key=lambda x: x) == []


 # helpers to associate timestamps with the errors (so something meaningful could be displayed on the plots, for example)
@@ -215,7 +215,7 @@ See {help_url}\
     if hasattr(err, 'obj') and hasattr(err, "name"):
         config_obj = cast(object, getattr(err, 'obj')) # the object that caused the attribute error
         # e.g. active_browser for my.browser
-        nested_block_name = err.name # type: ignore[attr-defined]
+        nested_block_name = err.name
         if config_obj.__module__ == 'my.config':
             click.secho(f"""You're likely missing the nested config block for '{getattr(config_obj, '__name__', str(config_obj))}.{nested_block_name}'.
 See {help_url} or check the corresponding module.py file for an example\

@@ -82,7 +82,7 @@ def kopen(path: PathIsh, *args, mode: str='rt', **kwargs) -> IO:
         ifile.read1 = ifile.read # type: ignore
         # TODO pass all kwargs here??
         # todo 'expected "BinaryIO"'??
-        return io.TextIOWrapper(ifile, encoding=encoding) # type: ignore[arg-type]
+        return io.TextIOWrapper(ifile, encoding=encoding)
     elif name.endswith(Ext.lz4):
         import lz4.frame # type: ignore
         return lz4.frame.open(str(pp), mode, *args, **kwargs)
@@ -95,7 +95,7 @@ def kopen(path: PathIsh, *args, mode: str='rt', **kwargs) -> IO:
         tf = tarfile.open(pp)
         # TODO pass encoding?
         x = tf.extractfile(*args); assert x is not None
-        return x # type: ignore[return-value]
+        return x
     else:
         return pp.open(mode, *args, **kwargs)

@@ -209,7 +209,7 @@ class ZipPath(zipfile_Path):
     def __truediv__(self, key) -> ZipPath:
         # need to implement it so the return type is not zipfile.Path
         tmp = zipfile_Path(self.root) / self.at / key
-        return ZipPath(self.root, tmp.at) # type: ignore[attr-defined]
+        return ZipPath(self.root, tmp.at)

     def iterdir(self) -> Iterator[ZipPath]:
         for s in self._as_dir().iterdir():

@@ -19,7 +19,7 @@ def zoom(w, *keys):
 # TODO need to support lists
 class Zoomable:
     def __init__(self, parent, *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs) # type: ignore
+        super().__init__(*args, **kwargs)
         self.parent = parent

     # TODO not sure, maybe do it via del??
@@ -147,7 +147,7 @@ Expected {c} to be fully consumed by the parser.

 from typing import cast
 def test_unconsumed() -> None:
-    import pytest # type: ignore
+    import pytest
     with pytest.raises(UnconsumedError):
         with wrap({'a': 1234}) as w:
             w = cast(Wdict, w)
@@ -200,7 +200,7 @@ def test_consume_few() -> None:


 def test_zoom() -> None:
-    import pytest # type: ignore
+    import pytest
     with wrap({'aaa': 'whatever'}) as w:
         w = cast(Wdict, w)
         with pytest.raises(KeyError):

@@ -62,20 +62,21 @@ def setup_logger(logger: logging.Logger, level: LevelIsh) -> None:
     lvl = mklevel(level)
     try:
         import logzero # type: ignore[import]
-    except ModuleNotFoundError:
-        warnings.warn("You might want to install 'logzero' for nice colored logs!")
-        formatter = logging.Formatter(fmt=FORMAT_NOCOLOR, datefmt=DATEFMT)
-        use_logzero = False
-    else:
         formatter = logzero.LogFormatter(
             fmt=FORMAT_COLOR,
             datefmt=DATEFMT,
         )
         use_logzero = True
+    except ModuleNotFoundError:
+        warnings.warn("You might want to install 'logzero' for nice colored logs!")
+        formatter = logging.Formatter(fmt=FORMAT_NOCOLOR, datefmt=DATEFMT)
+        use_logzero = False

     logger.addFilter(AddExceptionTraceback())
     if use_logzero and not COLLAPSE_DEBUG_LOGS: # all set, nothing to do
         # 'simple' setup
-        logzero.setup_logger(logger.name, level=lvl, formatter=formatter)
+        logzero.setup_logger(logger.name, level=lvl, formatter=formatter) # type: ignore[possibly-undefined]
         return

     h = CollapseDebugHandler() if COLLAPSE_DEBUG_LOGS else logging.StreamHandler()
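Note that the hunk above adds an ignore rather than removing one: the logzero name is only bound when the optional import succeeds, and mypy's opt-in possibly-undefined error code (presumably enabled by the config update, which isn't shown in this excerpt) flags the later use even though use_logzero guards it at runtime. A stripped-down illustration of the same shape, not code from the repo:

    import logging

    try:
        import logzero  # optional dependency
        use_logzero = True
    except ModuleNotFoundError:
        use_logzero = False

    logger = logging.getLogger('demo')
    if use_logzero:
        # with mypy --enable-error-code possibly-undefined, this line is reported
        # because logzero may be unbound, even though the flag guards it at runtime
        logzero.setup_logger(logger.name)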
@@ -101,7 +102,7 @@ class LazyLogger(logging.Logger):
         # oh god.. otherwise might go into an inf loop
         if not hasattr(logger, _init_done):
             setattr(logger, _init_done, False) # will setup on the first call
-            logger.isEnabledFor = isEnabledFor_lazyinit # type: ignore[assignment]
+            logger.isEnabledFor = isEnabledFor_lazyinit # type: ignore[method-assign]
         return cast(LazyLogger, logger)

@@ -15,7 +15,7 @@ logger = LazyLogger(__name__)
 if TYPE_CHECKING:
     # this is kinda pointless at the moment, but handy to annotate DF returning methods now
     # later will be unignored when they implement type annotations
-    import pandas as pd # type: ignore
+    import pandas as pd
     # DataFrameT = pd.DataFrame
     # TODO ugh. pretty annoying, having any is not very useful since it would allow arbitrary coercions..
     # ideally want to use a type that's like Any but doesn't allow arbitrary coercions??
@@ -26,7 +26,7 @@ else:


 def check_dateish(s) -> Iterable[str]:
-    import pandas as pd # type: ignore # noqa: F811 not actually a redefinition
+    import pandas as pd # noqa: F811 not actually a redefinition
     ctype = s.dtype
     if str(ctype).startswith('datetime64'):
         return

@@ -1,7 +1,7 @@
 from pathlib import Path

 def get_mycfg_dir() -> Path:
-    import appdirs # type: ignore[import]
+    import appdirs
     import os
     # not sure if that's necessary, i.e. could rely on PYTHONPATH instead
     # on the other hand, by using MY_CONFIG we are guaranteed to load it from the desired path?

@@ -495,7 +495,7 @@ Will attempt to call iter() on the value""")
     unsortable, itr = _handle_unsorted(itr, order_by_chosen, drop_unsorted, wrap_unsorted)

     # run the sort, with the computed order by function
-    itr = iter(sorted(itr, key=order_by_chosen, reverse=reverse)) # type: ignore[arg-type, type-var]
+    itr = iter(sorted(itr, key=order_by_chosen, reverse=reverse)) # type: ignore[arg-type]

     # re-attach unsortable values to the front/back of the list
     if reverse:

@@ -40,7 +40,7 @@ def parse_timedelta_string(timedelta_str: str) -> timedelta:
     if parts is None:
         raise ValueError(f"Could not parse time duration from {timedelta_str}.\nValid examples: '8h', '1w2d8h5m20s', '2m4s'")
     time_params = {name: float(param) for name, param in parts.groupdict().items() if param}
-    return timedelta(**time_params) # type: ignore[arg-type]
+    return timedelta(**time_params)


 def parse_timedelta_float(timedelta_str: str) -> float:
@@ -83,7 +83,7 @@ def parse_datetime_float(date_str: str) -> float:
         pass

     try:
-        import dateparser # type: ignore[import]
+        import dateparser
     except ImportError:
         pass
     else:

@@ -188,8 +188,8 @@ def test_nt_serialize() -> None:

     # test orjson option kwarg
     data = {datetime.date(year=1970, month=1, day=1): 5}
-    res = jsn.loads(dumps(data, option=orjson.OPT_NON_STR_KEYS))
-    assert res == {'1970-01-01': 5}
+    res2 = jsn.loads(dumps(data, option=orjson.OPT_NON_STR_KEYS))
+    assert res2 == {'1970-01-01': 5}


 def test_default_serializer() -> None:

@@ -22,7 +22,7 @@ def test_sqlite_connect_immutable(tmp_path: Path) -> None:
     with sqlite3.connect(db) as conn:
         conn.execute('CREATE TABLE testtable (col)')

-    import pytest # type: ignore
+    import pytest
     with pytest.raises(sqlite3.OperationalError, match='readonly database'):
         with sqlite_connect_immutable(db) as conn:
             conn.execute('DROP TABLE testtable')

@@ -62,7 +62,7 @@ def _iter_all_importables(pkg: ModuleType) -> Iterable[HPIModule]:
         _discover_path_importables(Path(p), pkg.__name__)
         # todo might need to handle __path__ for individual modules too?
         # not sure why __path__ was duplicated, but it did happen..
-        for p in set(pkg.__path__) # type: ignore[attr-defined]
+        for p in set(pkg.__path__)
     )