takeout module; make more resilient to multipart
parent 232d62b3b7
commit 56f64c16db
3 changed files with 45 additions and 17 deletions
my/takeout.py (new file, 30 additions)
@@ -0,0 +1,30 @@
from pathlib import Path
from typing import Optional

from .common import get_files

from mycfg import paths

from kython.kompress import open as kopen

def get_last_takeout(*, path: Optional[str] = None) -> Path:
    """
    Ok, sometimes Google splits a takeout into two zip archives.
    I guess I could detect it (they've got 001/002 etc. suffixes), but for now this works fine..
    """
    for takeout in reversed(get_files(paths.google.takeout_path, glob='*.zip')):
        if path is None:
            return takeout
        else:
            try:
                kopen(takeout, path)
                return takeout
            except Exception:
                # TODO eh, a bit horrible, but works for now..
                continue
    raise RuntimeError(f'Not found: {path}')

# TODO might be a good idea to merge across multiple takeouts...
# perhaps even a special takeout module that deals with all of this automatically?
# e.g. accumulate, filter and maybe report useless takeouts?
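Below is a minimal sketch (not part of this commit) of the 001/002 multipart detection that the docstring mentions but leaves unimplemented. The suffix pattern and the helper names are assumptions for illustration only; Google's actual archive naming may differ.

import re
from pathlib import Path
from typing import Dict, List, Optional

# Assumed naming scheme: split archives end in '-001.zip', '-002.zip', ...
_PART_RE = re.compile(r'-(\d{3})\.zip$')

def takeout_part(archive: Path) -> Optional[int]:
    """Return the part number of a split takeout archive, or None if it isn't split."""
    m = _PART_RE.search(archive.name)
    return int(m.group(1)) if m is not None else None

def group_multipart(archives: List[Path]) -> Dict[str, List[Path]]:
    """Group archives so parts of the same takeout stay together, ordered by name."""
    groups: Dict[str, List[Path]] = {}
    for a in sorted(archives, key=lambda p: p.name):
        base = _PART_RE.sub('.zip', a.name)
        groups.setdefault(base, []).append(a)
    return groups

With grouping like this, get_last_takeout could probe every part of the newest group before falling back to older takeouts, rather than skipping a takeout whose first archive happens not to contain the requested path.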