use lazylogger

This commit is contained in:
Dima Gerasimov 2020-02-26 22:09:44 +01:00
parent f71ff39aad
commit a665cf528c
2 changed files with 4 additions and 19 deletions

View file

@@ -1,17 +1,10 @@
import sys import sys
import logging
from .takeout import get_logger, get_locations, iter_locations, get_groups from .takeout import logger, get_locations, iter_locations, get_groups
from kython.klogging import setup_logzero
# TODO remove this? # TODO remove this?
def main(): def main():
logger = get_logger()
setup_logzero(logger, level=logging.DEBUG)
if len(sys.argv) > 1: if len(sys.argv) > 1:
cmd = sys.argv[1] cmd = sys.argv[1]
# TODO ok, update cache makes sense just to refresh in case of code changes... # TODO ok, update cache makes sense just to refresh in case of code changes...

View file

@@ -3,14 +3,11 @@ Module for Google Takeout data
""" """
import json import json
import logging
import re
from collections import deque from collections import deque
from datetime import datetime from datetime import datetime
from itertools import islice from itertools import islice
from pathlib import Path from pathlib import Path
from typing import Any, Collection, Deque, Iterable, Iterator, List, NamedTuple, Optional, Sequence from typing import Any, Collection, Deque, Iterable, Iterator, List, NamedTuple, Optional, Sequence
from zipfile import ZipFile
import pytz import pytz
# pip3 install geopy # pip3 install geopy
@@ -25,13 +22,12 @@ except:
# fallback to default backend. warning? # fallback to default backend. warning?
import ijson # type: ignore import ijson # type: ignore
from ..common import get_files from ..common import get_files, LazyLogger
from ..takeout import get_last_takeout from ..takeout import get_last_takeout
from ..kython import kompress from ..kython import kompress
def get_logger(): logger = LazyLogger(__package__)
return logging.getLogger("location")
def cache_path(*args, **kwargs): def cache_path(*args, **kwargs):
@@ -51,8 +47,6 @@ class Location(NamedTuple):
# TODO use pool? not sure if that would really be faster... # TODO use pool? not sure if that would really be faster...
def _iter_locations_fo(fo, start, stop) -> Iterator[Location]: def _iter_locations_fo(fo, start, stop) -> Iterator[Location]:
logger = get_logger()
total = 0 total = 0
errors = 0 errors = 0
@@ -115,7 +109,7 @@ _LOCATION_JSON = 'Takeout/Location History/Location History.json'
# TODO CACHEW_OFF env variable? # TODO CACHEW_OFF env variable?
# TODO use mcachew # TODO use mcachew
from cachew import cachew, mtime_hash from cachew import cachew, mtime_hash
@cachew(cache_path, hashf=mtime_hash, cls=Location, chunk_by=10000, logger=get_logger()) @cachew(cache_path, hashf=mtime_hash, cls=Location, chunk_by=10000, logger=logger)
def _iter_locations(path: Path, start=0, stop=None) -> Iterator[Location]: def _iter_locations(path: Path, start=0, stop=None) -> Iterator[Location]:
if path.suffix == '.json': if path.suffix == '.json':
ctx = path.open('r') ctx = path.open('r')
@@ -183,8 +177,6 @@ class Window:
# TODO cachew as well? # TODO cachew as well?
# TODO maybe if tag is none, we just don't care? # TODO maybe if tag is none, we just don't care?
def get_groups(*args, **kwargs) -> List[LocInterval]: def get_groups(*args, **kwargs) -> List[LocInterval]:
logger = get_logger()
all_locations = iter(iter_locations(*args, **kwargs)) all_locations = iter(iter_locations(*args, **kwargs))
locsi = Window(all_locations) locsi = Window(all_locations)
i = 0 i = 0