general: switch to make_logger in a few modules, use a bit more consistent logging, rely on default INFO level

This commit is contained in:
Dima Gerasimov 2023-06-21 18:01:51 +01:00 committed by karlicoss
parent 661714f1d9
commit dff31455f1
8 changed files with 27 additions and 24 deletions

View file

@@ -10,15 +10,13 @@ import re
import sqlite3 import sqlite3
from typing import Iterable, Sequence, Set, Optional from typing import Iterable, Sequence, Set, Optional
from my.core import get_files, LazyLogger, dataclass, Res from my.core import get_files, make_logger, dataclass, Res
from my.core.sqlite import sqlite_connect_immutable from my.core.sqlite import sqlite_connect_immutable
from my.config import bluemaestro as config from my.config import bluemaestro as config
# todo control level via env variable? logger = make_logger(__name__)
# i.e. HPI_LOGGING_MY_BLUEMAESTRO_LEVEL=debug
logger = LazyLogger(__name__, level='debug')
def inputs() -> Sequence[Path]: def inputs() -> Sequence[Path]:
@@ -63,7 +61,7 @@ def measurements() -> Iterable[Res[Measurement]]:
# tables are immutable, so can save on processing.. # tables are immutable, so can save on processing..
processed_tables: Set[str] = set() processed_tables: Set[str] = set()
for f in dbs: for f in dbs:
logger.debug('processing %s', f) logger.info('processing %s', f)
tot = 0 tot = 0
new = 0 new = 0
# todo assert increasing timestamp? # todo assert increasing timestamp?

View file

@@ -25,8 +25,8 @@ class github(user_config):
### ###
from ..core import LazyLogger from ..core import make_logger
logger = LazyLogger(__name__) logger = make_logger(__name__)
from ..core.cfg import make_config from ..core.cfg import make_config
@@ -85,6 +85,7 @@ def events() -> Iterable[Res[Event]]:
'repository_files_': None, # repository artifacts, probs not very useful 'repository_files_': None, # repository artifacts, probs not very useful
} }
for f in files: for f in files:
logger.info(f'{f} : processing...')
handler: Any handler: Any
for prefix, h in handler_map.items(): for prefix, h in handler_map.items():
if not f.name.startswith(prefix): if not f.name.startswith(prefix):

View file

@@ -14,7 +14,7 @@ from more_itertools import unique_everseen
from my.core import ( from my.core import (
get_files, Paths, get_files, Paths,
make_config, make_config,
LazyLogger, make_logger,
datetime_naive, datetime_naive,
Json, Json,
Res, assert_never, Res, assert_never,
@@ -24,7 +24,7 @@ from my.core.sqlite import sqlite_connect_immutable, select
from my.config import instagram as user_config from my.config import instagram as user_config
logger = LazyLogger(__name__, level='debug') logger = make_logger(__name__)
@dataclass @dataclass
class instagram_android_config(user_config.android): class instagram_android_config(user_config.android):
@@ -132,7 +132,9 @@ def _entities() -> Iterator[Res[Union[User, _Message]]]:
# NOTE: definitely need to merge multiple, app seems to recycle old messages # NOTE: definitely need to merge multiple, app seems to recycle old messages
# TODO: hmm hard to guarantee timestamp ordering when we use synthetic input data... # TODO: hmm hard to guarantee timestamp ordering when we use synthetic input data...
# todo use TypedDict? # todo use TypedDict?
for f in inputs(): dbs = inputs()
for f in dbs:
logger.info(f'{f} : processing...')
with sqlite_connect_immutable(f) as db: with sqlite_connect_immutable(f) as db:
# TODO ugh. seems like no way to extract username? # TODO ugh. seems like no way to extract username?
# sometimes messages (e.g. media_share) contain it in message field # sometimes messages (e.g. media_share) contain it in message field

View file

@@ -15,14 +15,14 @@ from my.core import (
datetime_naive, datetime_naive,
Res, Res,
assert_never, assert_never,
LazyLogger, make_logger,
) )
from my.core.kompress import ZipPath from my.core.kompress import ZipPath
from my.config import instagram as user_config from my.config import instagram as user_config
logger = LazyLogger(__name__, level='debug') logger = make_logger(__name__)
@dataclass @dataclass
@@ -113,6 +113,7 @@ def _entities() -> Iterator[Res[Union[User, _Message]]]:
for fname, ffiles in file_map.items(): for fname, ffiles in file_map.items():
for ffile in sorted(ffiles, key=lambda p: int(p.stem.split('_')[-1])): for ffile in sorted(ffiles, key=lambda p: int(p.stem.split('_')[-1])):
logger.info(f'{ffile} : processing...')
j = json.loads(ffile.read_text()) j = json.loads(ffile.read_text())
id_len = 10 id_len = 10

View file

@@ -64,10 +64,11 @@ except ModuleNotFoundError as e:
############################ ############################
from typing import List, Sequence, Mapping, Iterator, Any from typing import List, Sequence, Mapping, Iterator, Any
from my.core.common import mcachew, get_files, LazyLogger, make_dict, Stats from my.core import make_logger
from my.core.common import mcachew, get_files, make_dict, Stats
logger = LazyLogger(__name__, level='info') logger = make_logger(__name__)
from pathlib import Path from pathlib import Path
@@ -85,8 +86,8 @@ Upvote = dal.Upvote
def _dal() -> dal.DAL: def _dal() -> dal.DAL:
inp = list(inputs()) inp = list(inputs())
return dal.DAL(inp) return dal.DAL(inp)
cache = mcachew(depends_on=inputs, logger=logger) # depends on inputs only
cache = mcachew(depends_on=inputs)
@cache @cache
def saved() -> Iterator[Save]: def saved() -> Iterator[Save]:

View file

@@ -9,14 +9,14 @@ from pathlib import Path
from datetime import timedelta from datetime import timedelta
from typing import Sequence, Iterable from typing import Sequence, Iterable
from .core import get_files, LazyLogger from my.core import get_files, make_logger
from .core.common import mcachew from my.core.common import mcachew
from .core.error import Res, split_errors from my.core.error import Res, split_errors
from my.config import rescuetime as config from my.config import rescuetime as config
log = LazyLogger(__name__, level='info') logger = make_logger(__name__)
def inputs() -> Sequence[Path]: def inputs() -> Sequence[Path]:

View file

@@ -44,11 +44,11 @@ from typing import List, Optional, NamedTuple, Sequence, Iterator
from pathlib import Path from pathlib import Path
import json import json
from ..core.common import get_files, LazyLogger, Json from my.core import get_files, make_logger, Json
logger = LazyLogger(__name__, level="warning") logger = make_logger(__name__)
def inputs() -> Sequence[Path]: def inputs() -> Sequence[Path]:
@@ -175,7 +175,7 @@ class ZipExport:
self.old_format = True self.old_format = True
def raw(self, what: str, *, fname: Optional[str]=None) -> Iterator[Json]: def raw(self, what: str, *, fname: Optional[str]=None) -> Iterator[Json]:
logger.info('processing: %s %s', self.zpath, what) logger.info(f'{self.zpath} : processing {what}')
path = fname or what path = fname or what
if not self.old_format: if not self.old_format:

View file

@@ -5,11 +5,11 @@ from typing import Iterator, Optional, Dict, Any, Sequence
from pathlib import Path from pathlib import Path
import sqlite3 import sqlite3
from my.core import LazyLogger, Res, datetime_aware from my.core import make_logger, Res, datetime_aware
from my.core.sqlite import sqlite_copy_and_open from my.core.sqlite import sqlite_copy_and_open
logger = LazyLogger(__name__, level='debug') logger = make_logger(__name__)
def inputs() -> Sequence[Path]: def inputs() -> Sequence[Path]: