docs: fix typos
found via `codespell -L copie,datas,pres,fo,tooks,noo,ue,ket,frop`
parent 347cd1ef77
commit 2447911040

50 changed files with 69 additions and 69 deletions
@@ -77,7 +77,7 @@ def entries() -> Iterable[Entry]:
     if len(inps) == 0:
         cmds = [base] # rely on default
     else:
-        # otherise, 'merge' them
+        # otherwise, 'merge' them
         cmds = [base + ['--logfile', f] for f in inps]
 
     import ijson.backends.yajl2_cffi as ijson # type: ignore
@@ -146,7 +146,7 @@ def dataframe() -> DataFrameT:
     # todo careful about 'how'? we need it to preserve the errors
     # maybe pd.merge is better suited for this??
     df = edf.join(mdf, how='outer', rsuffix='_manual')
-    # todo reindex? so we dont' have Nan leftovers
+    # todo reindex? so we don't have Nan leftovers
 
     # todo set date anyway? maybe just squeeze into the index??
     noendo = df['error'] == NO_ENDOMONDO
@@ -59,7 +59,7 @@ class Commit:
     committed_dt: datetime
     authored_dt: datetime
     message: str
-    repo: str # TODO put canonical name here straightaway??
+    repo: str # TODO put canonical name here straight away??
     sha: str
     ref: Optional[str] = None
     # TODO filter so they are authored by me
@@ -143,7 +143,7 @@ def config_ok() -> bool:
     else:
         info(f'import order: {paths}')
 
-    # first try doing as much as possible without actually imporing my.config
+    # first try doing as much as possible without actually importing my.config
     from .preinit import get_mycfg_dir
     cfg_path = get_mycfg_dir()
     # alternative is importing my.config and then getting cfg_path from its __file__/__path__
@@ -267,7 +267,7 @@ def modules_check(*, verbose: bool, list_all: bool, quick: bool, for_modules: Li
         # todo more specific command?
         error(f'{click.style("FAIL", fg="red")}: {m:<50} loading failed{vw}')
         # check that this is an import error in particular, not because
-        # of a ModuleNotFoundError because some dependency wasnt installed
+        # of a ModuleNotFoundError because some dependency wasn't installed
         if isinstance(e, (ImportError, AttributeError)):
             warn_my_config_import_error(e)
         if verbose:
@@ -441,7 +441,7 @@ def _locate_functions_or_prompt(qualified_names: List[str], prompt: bool = True)
     from .query import locate_qualified_function, QueryException
     from .stats import is_data_provider
 
-    # if not connected to a terminal, cant prompt
+    # if not connected to a terminal, can't prompt
     if not sys.stdout.isatty():
         prompt = False
 
@@ -471,7 +471,7 @@ def _locate_functions_or_prompt(qualified_names: List[str], prompt: bool = True)
         else:
             choices = [f.__name__ for f in data_providers]
             if prompt is False:
-                # theres more than one possible data provider in this module,
+                # there's more than one possible data provider in this module,
                 # STDOUT is not a TTY, can't prompt
                 eprint("During fallback, more than one possible data provider, can't prompt since STDOUT is not a TTY")
                 eprint("Specify one of:")
@@ -576,7 +576,7 @@ def main(debug: bool) -> None:
     # acts as a contextmanager of sorts - any subcommand will then run
     # in something like /tmp/hpi_temp_dir
     # to avoid importing relative modules by accident during development
-    # maybe can be removed later if theres more test coverage/confidence that nothing
+    # maybe can be removed later if there's more test coverage/confidence that nothing
     # would happen?
 
     # use a particular directory instead of a random one, since
@@ -433,7 +433,7 @@ def warn_if_empty(f):
 QUICK_STATS = False
 
 
-# incase user wants to use the stats functions/quick option
+# in case user wants to use the stats functions/quick option
 # elsewhere -- can use this decorator instead of editing
 # the global state directly
 @contextmanager
@@ -127,7 +127,7 @@ else:
     TypedDict = Dict
 
 
-# bisect_left doesnt have a 'key' parameter (which we use)
+# bisect_left doesn't have a 'key' parameter (which we use)
 # till python3.10
 if sys.version_info[:2] <= (3, 9):
     from typing import List, TypeVar, Any, Optional, Callable
@@ -1,5 +1,5 @@
 """
-A helper module for defining denylists for sources programatically
+A helper module for defining denylists for sources programmatically
 (in lamens terms, this lets you remove some output from a module you don't want)
 
 For docs, see doc/DENYLIST.md
@@ -119,7 +119,7 @@ def _extract_requirements(a: ast.Module) -> Requires:
         elif isinstance(c, ast.Str):
             deps.append(c.s)
         else:
-            raise RuntimeError(f"Expecting string contants only in {REQUIRES} declaration")
+            raise RuntimeError(f"Expecting string constants only in {REQUIRES} declaration")
         return tuple(deps)
     return None
 
@@ -1,7 +1,7 @@
 '''
 A hook to insert user's config directory into Python's search path.
 
-Ideally that would be in __init__.py (so it's executed without having to import explicityly)
+Ideally that would be in __init__.py (so it's executed without having to import explicitly)
 But, with namespace packages, we can't have __init__.py in the parent subpackage
 (see http://python-notes.curiousefficiency.org/en/latest/python_concepts/import_traps.html#the-init-py-trap)
 
@@ -46,7 +46,7 @@ def _zstd_open(path: Path, *args, **kwargs) -> IO:
 # TODO use the 'dependent type' trick for return type?
 def kopen(path: PathIsh, *args, mode: str='rt', **kwargs) -> IO:
     # just in case, but I think this shouldn't be necessary anymore
-    # since when we cann .read_text, encoding is passed already
+    # since when we call .read_text, encoding is passed already
     if mode in {'r', 'rt'}:
         encoding = kwargs.get('encoding', 'utf8')
     else:
@@ -145,7 +145,7 @@ class CollapseDebugHandler(logging.StreamHandler):
             import os
             columns, _ = os.get_terminal_size(0)
             # ugh. the columns thing is meh. dunno I guess ultimately need curses for that
-            # TODO also would be cool to have a terminal post-processor? kinda like tail but aware of logging keyworkds (INFO/DEBUG/etc)
+            # TODO also would be cool to have a terminal post-processor? kinda like tail but aware of logging keywords (INFO/DEBUG/etc)
             self.stream.write(msg + ' ' * max(0, columns - len(msg)) + ('' if cur else '\n'))
             self.flush()
         except:
@@ -74,7 +74,7 @@ No 'error' column detected. You probably forgot to handle errors defensively, wh
 from typing import Any, Callable, TypeVar
 FuncT = TypeVar('FuncT', bound=Callable[..., DataFrameT])
 
-# TODO ugh. typing this is a mess... shoul I use mypy_extensions.VarArg/KwArgs?? or what??
+# TODO ugh. typing this is a mess... should I use mypy_extensions.VarArg/KwArgs?? or what??
 from decorator import decorator
 @decorator
 def check_dataframe(f: FuncT, error_col_policy: ErrorColPolicy='add_if_missing', *args, **kwargs) -> DataFrameT:
@@ -26,7 +26,7 @@ ET = Res[T]
 U = TypeVar("U")
 # In a perfect world, the return value from a OrderFunc would just be U,
 # not Optional[U]. However, since this has to deal with so many edge
-# cases, theres a possibility that the functions generated by
+# cases, there's a possibility that the functions generated by
 # _generate_order_by_func can't find an attribute
 OrderFunc = Callable[[ET], Optional[U]]
 Where = Callable[[ET], bool]
@@ -54,7 +54,7 @@ def locate_function(module_name: str, function_name: str) -> Callable[[], Iterab
         for (fname, func) in inspect.getmembers(mod, inspect.isfunction):
             if fname == function_name:
                 return func
-        # incase the function is defined dynamically,
+        # in case the function is defined dynamically,
         # like with a globals().setdefault(...) or a module-level __getattr__ function
         func = getattr(mod, function_name, None)
         if func is not None and callable(func):
@@ -244,7 +244,7 @@ def _drop_unsorted(itr: Iterator[ET], orderfunc: OrderFunc) -> Iterator[ET]:
 
 
 # try getting the first value from the iterator
-# similar to my.core.common.warn_if_empty? this doesnt go through the whole iterator though
+# similar to my.core.common.warn_if_empty? this doesn't go through the whole iterator though
 def _peek_iter(itr: Iterator[ET]) -> Tuple[Optional[ET], Iterator[ET]]:
     itr = more_itertools.peekable(itr)
     try:
@@ -290,7 +290,7 @@ def _handle_unsorted(
         return iter([]), itr
 
 
-# handles creating an order_value functon, using a lookup for
+# handles creating an order_value function, using a lookup for
 # different types. ***This consumes the iterator***, so
 # you should definitely itertoolts.tee it beforehand
 # as to not exhaust the values
@@ -374,7 +374,7 @@ def select(
     by allowing you to provide custom predicates (functions) which can sort
     by a function, an attribute, dict key, or by the attributes values.
 
-    Since this supports mixed types, theres always a possibility
+    Since this supports mixed types, there's always a possibility
     of KeyErrors or AttributeErrors while trying to find some value to order by,
     so this provides multiple mechanisms to deal with that
 
@@ -220,7 +220,7 @@ def _create_range_filter(
     # inclusivity here? Is [after, before) currently,
     # items are included on the lower bound but not the
     # upper bound
-    # typically used for datetimes so doesnt have to
+    # typically used for datetimes so doesn't have to
     # be exact in that case
     def generated_predicate(obj: Any) -> bool:
         ov: Any = attr_func(obj)
@@ -294,7 +294,7 @@ def select_range(
 
     # some operations to do before ordering/filtering
     if drop_exceptions or raise_exceptions or where is not None:
-        # doesnt wrap unsortable items, because we pass no order related kwargs
+        # doesn't wrap unsortable items, because we pass no order related kwargs
         itr = select(itr, where=where, drop_exceptions=drop_exceptions, raise_exceptions=raise_exceptions)
 
     order_by_chosen: Optional[OrderFunc] = None
@@ -356,7 +356,7 @@ Specify a type or a key to order the value by""")
     #
     # this select is also run if the user didn't specify anything to
     # order by, and is just returning the data in the same order as
-    # as the srouce iterable
+    # as the source iterable
     # i.e. none of the range-related filtering code ran, this is just a select
     itr = select(itr,
                  order_by=order_by_chosen,
@@ -483,7 +483,7 @@ def test_parse_range() -> None:
 
     assert res2 == RangeTuple(after=start_date.timestamp(), before=end_date.timestamp(), within=None)
 
-    # cant specify all three
+    # can't specify all three
     with pytest.raises(QueryException, match=r"Cannot specify 'after', 'before' and 'within'"):
         dt_parse_range(unparsed_range=RangeTuple(str(start_date), str(end_date.timestamp()), "7d"))
 
@@ -96,7 +96,7 @@ def _dumps_factory(**kwargs) -> Callable[[Any], str]:
     # is rust-based and compiling on rarer architectures may not work
     # out of the box
     #
-    # unlike the builtin JSON modue which serializes NamedTuples as lists
+    # unlike the builtin JSON module which serializes NamedTuples as lists
     # (even if you provide a default function), simplejson correctly
     # serializes namedtuples to dictionaries
 
@@ -157,7 +157,7 @@ def dumps(
 def test_serialize_fallback() -> None:
     import json as jsn # dont cause possible conflicts with module code
 
-    # cant use a namedtuple here, since the default json.dump serializer
+    # can't use a namedtuple here, since the default json.dump serializer
     # serializes namedtuples as tuples, which become arrays
     # just test with an array of mixed objects
     X = [5, datetime.timedelta(seconds=5.0)]
@@ -216,7 +216,7 @@ def test_default_serializer() -> None:
     def _serialize_with_default(o: Any) -> Any:
         if isinstance(o, Unserializable):
             return {"x": o.x, "y": o.y}
-        raise TypeError("Couldnt serialize")
+        raise TypeError("Couldn't serialize")
 
     # this serializes both Unserializable, which is a custom type otherwise
     # not handled, and timedelta, which is handled by the '_default_encode'
@@ -94,7 +94,7 @@ def sqlite_copy_and_open(db: PathIsh) -> sqlite3.Connection:
 
 # NOTE hmm, so this kinda works
 # V = TypeVar('V', bound=Tuple[Any, ...])
-# def select(cols: V, rest: str, *, db: sqlite3.Connetion) -> Iterator[V]:
+# def select(cols: V, rest: str, *, db: sqlite3.Connection) -> Iterator[V]:
 # but sadly when we pass columns (Tuple[str, ...]), it seems to bind this type to V?
 # and then the return type ends up as Iterator[Tuple[str, ...]], which isn't desirable :(
 # a bit annoying to have this copy-pasting, but hopefully not a big issue
@@ -35,7 +35,7 @@ def is_data_provider(fun: Any) -> bool:
     1. returns iterable or something like that
     2. takes no arguments? (otherwise not callable by stats anyway?)
     3. doesn't start with an underscore (those are probably helper functions?)
-    4. functions isnt the 'inputs' function (or ends with '_inputs')
+    4. functions isn't the 'inputs' function (or ends with '_inputs')
     """
     # todo maybe for 2 allow default arguments? not sure
     # one example which could benefit is my.pdfs
@@ -246,7 +246,7 @@ def stats():
     sys.path = orig_path
     # shouldn't crash at least
     assert res is None # good as far as discovery is concerned
-    assert xx.read_text() == 'some precious data' # make sure module wasn't evauluated
+    assert xx.read_text() == 'some precious data' # make sure module wasn't evaluated
 
 
 ### tests end
@@ -46,7 +46,7 @@ from .core import Json, get_files
 @dataclass
 class Item:
     '''
-    Some completely arbirary artificial stuff, just for testing
+    Some completely arbitrary artificial stuff, just for testing
     '''
     username: str
     raw: Json
@@ -38,7 +38,7 @@ def datas() -> Iterable[Res[Emfit]]:
     import dataclasses
 
     # data from emfit is coming in UTC. There is no way (I think?) to know the 'real' timezone, and local times matter more for sleep analysis
-    # TODO actully this is wrong?? check this..
+    # TODO actually this is wrong?? check this..
     emfit_tz = config.timezone
 
     for x in dal.sleeps(config.export_path):
@@ -177,7 +177,7 @@ def messages() -> Iterator[Res[Message]]:
         reply_to_id = x.reply_to_id
         # hmm, reply_to be missing due to the synthetic nature of export, so have to be defensive
         reply_to = None if reply_to_id is None else msgs.get(reply_to_id)
-        # also would be interesting to merge together entities rather than resuling messages from different sources..
+        # also would be interesting to merge together entities rather than resulting messages from different sources..
         # then the merging thing could be moved to common?
         try:
             sender = senders[x.sender_id]
@@ -128,7 +128,7 @@ def _get_summary(e) -> Tuple[str, Optional[Link], Optional[EventId], Optional[Bo
         rt = pl['ref_type']
         ref = pl['ref']
         if what == 'created':
-            # FIXME should handle delection?...
+            # FIXME should handle deletion?...
             eid = EventIds.repo_created(dts=dts, name=rname, ref_type=rt, ref=ref)
             mref = '' if ref is None else ' ' + ref
             # todo link to branch? only contains weird API link though
@@ -58,7 +58,7 @@ def items() -> Iterator[Res[Item]]:
             type=r['type'],
             created=datetime.fromtimestamp(r['time']),
             title=r['title'],
-            # todo hmm maybe a method to stip off html tags would be nice
+            # todo hmm maybe a method to strip off html tags would be nice
             text_html=r['text'],
             url=r['url'],
         )
@@ -53,7 +53,7 @@ class _Message(_BaseMessage):
 @dataclass(unsafe_hash=True)
 class Message(_BaseMessage):
     user: User
-    # TODO could also extract Thread objec? not sure if useful
+    # TODO could also extract Thread object? not sure if useful
     # reply_to: Optional[Message]
 
 
@@ -242,7 +242,7 @@ def plot_one(sleep: SleepEntry, fig: Figure, axes: Axes, xlims=None, showtext=Tr
 
 def predicate(sleep: SleepEntry):
     """
-    Filter for comparing similar sleep sesssions
+    Filter for comparing similar sleep sessions
     """
     start = sleep.created.time()
     end = sleep.completed.time()
@@ -64,7 +64,7 @@ class FallbackLocation(LocationProtocol):
         )
 
 
-# a location estimator can return multiple fallbacks, incase there are
+# a location estimator can return multiple fallbacks, in case there are
 # differing accuracies/to allow for possible matches to be computed
 # iteratively
 LocationEstimator = Callable[[DateExact], Iterator[FallbackLocation]]
@@ -50,7 +50,7 @@ def fallback_locations() -> Iterator[FallbackLocation]:
         )
 
 
-# for compatibility with my.location.via_ip, this shouldnt be used by other modules
+# for compatibility with my.location.via_ip, this shouldn't be used by other modules
 def locations() -> Iterator[Location]:
     medium("locations is deprecated, should use fallback_locations or estimate_location")
     yield from map(FallbackLocation.to_location, fallback_locations())
@@ -82,7 +82,7 @@ def _iter_via_grep(fo) -> Iterable[TsLatLon]:
 
 
 # todo could also use pool? not sure if that would really be faster...
-# earch thread could process 100K at once?
+# search thread could process 100K at once?
 # would need to find out a way to know when to stop? process in some sort of sqrt progression??
 
 
@@ -79,7 +79,7 @@ class Annotation(NamedTuple):
 def _as_annotation(*, raw: pdfannots.Annotation, path: str) -> Annotation:
     d = vars(raw)
     pos = raw.pos
-    # make mypy happy (pos alwasy present for Annotation https://github.com/0xabu/pdfannots/blob/dbdfefa158971e1746fae2da139918e9f59439ea/pdfannots/types.py#L302)
+    # make mypy happy (pos always present for Annotation https://github.com/0xabu/pdfannots/blob/dbdfefa158971e1746fae2da139918e9f59439ea/pdfannots/types.py#L302)
     assert pos is not None
     d['page'] = pos.page.pageno
     return Annotation(
@@ -43,7 +43,7 @@ class Photo(NamedTuple):
         if self.path.startswith(bp):
             return self.path[len(bp):]
         else:
-            raise RuntimeError(f'Weird path {self.path}, cant match against anything')
+            raise RuntimeError(f"Weird path {self.path}, can't match against anything")
 
     @property
     def name(self) -> str:
@@ -48,7 +48,7 @@ def _get_exif_data(image) -> Exif:
 
 def to_degree(value) -> float:
     """Helper function to convert the GPS coordinates
-    stored in the EXIF to degress in float format"""
+    stored in the EXIF to digress in float format"""
     (d, m, s) = value
     return d + (m / 60.0) + (s / 3600.0)
 
@@ -65,7 +65,7 @@ from datetime import datetime
 from typing import Optional
 
 # TODO surely there is a library that does it??
-# TODO this belogs to a private overlay or something
+# TODO this belongs to a private overlay or something
 # basically have a function that patches up dates after the files were yielded..
 _DT_REGEX = re.compile(r'\D(\d{8})\D*(\d{6})\D')
 def dt_from_path(p: Path) -> Optional[datetime]:
@@ -197,7 +197,7 @@ def _get_events(backups: Sequence[Path], parallel: bool=True) -> Iterator[Event]
             # eh. I guess just take max and it will always be correct?
             assert not first
             yield Event(
-                dt=bdt, # TODO average wit ps.save_dt?
+                dt=bdt, # TODO average with ps.save_dt?
                 text="unfavorited",
                 kind=ps,
                 eid=f'unf-{ps.sid}',
@@ -39,7 +39,7 @@ class Entry(NamedTuple):
     def timestamp(self) -> datetime:
         ts = self.row['timestamp']
         # already with timezone apparently
-        # TODO not sure if should stil localize though? it only kept tz offset, not real tz
+        # TODO not sure if should still localize though? it only kept tz offset, not real tz
         return datetime.fromisoformat(ts)
     # TODO also has gps info!
 
@@ -35,7 +35,7 @@ class config(user_config):
     fast: bool = True
 
     # sort locations by date
-    # incase multiple sources provide them out of order
+    # in case multiple sources provide them out of order
     sort_locations: bool = True
 
     # if the accuracy for the location is more than 5km, don't use
@@ -94,7 +94,7 @@ def _locations() -> Iterator[Tuple[LatLon, datetime]]:
 
     except Exception as e:
         from my.core.warnings import high
-        logger.exception("Could not setup via_location using my.location.all provider, falling back to legacy google implemetation", exc_info=e)
+        logger.exception("Could not setup via_location using my.location.all provider, falling back to legacy google implementation", exc_info=e)
         high("Setup my.google.takeout.parser, then my.location.all for better google takeout/location data")
 
         import my.location.google
@@ -134,7 +134,7 @@ def _find_tz_for_locs(finder: Any, locs: Iterable[Tuple[LatLon, datetime]]) -> I
 def _iter_local_dates() -> Iterator[DayWithZone]:
     finder = _timezone_finder(fast=config.fast) # rely on the default
     #pdt = None
-    # TODO: warnings doesnt actually warn?
+    # TODO: warnings doesn't actually warn?
     # warnings = []
 
     locs: Iterable[Tuple[LatLon, datetime]]
@@ -102,7 +102,7 @@ def _handle_db(db: sqlite3.Connection) -> Iterator[Res[_Entity]]:
         try:
             yield _parse_person(row)
         except Exception as e:
-            # todo attach error contex?
+            # todo attach error context?
             yield e
 
     for row in db.execute('SELECT * FROM match'):
@@ -68,7 +68,7 @@ def watched() -> Iterable[Res[Watched]]:
             continue
 
         if title.startswith('Subscribed to') and url.startswith('https://www.youtube.com/channel/'):
-            # todo might be interesting to process somwhere?
+            # todo might be interesting to process somewhere?
             continue
 
         # all titles contain it, so pointless to include 'Watched '