extract _load_locations method

Dima Gerasimov 2019-04-30 12:54:13 +02:00
parent 22c06a48e1
commit 7f51eedc0f


@@ -48,17 +48,11 @@ def tagger(dt: datetime, point: geopy.Point) -> Tag:
     else:
         return "other"
 
-# TODO hope they are sorted...
-# TODO that could also serve as basis for tz provider
-def load_locations() -> Iterator[Location]:
+def _load_locations(fo) -> Iterator[Location]:
     logger = get_logger()
-
-    last_takeout = max(TAKEOUTS_PATH.glob('takeout*.zip'))
-    # TODO wonder if old takeouts could contribute as well??
-
+    # TODO count errors?
     total = 0
     errors = 0
-    with kompress.open(last_takeout, 'Takeout/Location History/Location History.json') as fo:
     for j in ijson.items(fo, 'locations.item'):
         dt = datetime.utcfromtimestamp(int(j["timestampMs"]) / 1000)
         if total % 10000 == 0:
@@ -88,6 +82,17 @@ def load_locations() -> Iterator[Location]:
             tag=tag
         )
+
+# TODO hope they are sorted...
+# TODO that could also serve as basis for tz provider
+def load_locations() -> Iterator[Location]:
+    logger = get_logger()
+
+    last_takeout = max(TAKEOUTS_PATH.glob('takeout*.zip'))
+    # TODO wonder if old takeouts could contribute as well??
+
+    with kompress.open(last_takeout, 'Takeout/Location History/Location History.json') as fo:
+        return _load_locations(fo)
 
 def iter_locations(cached: bool=False) -> Iterator[Location]:
     import sys
     sys.path.append('/L/Dropbox/data/location_provider') # jeez.. otherwise it refuses to unpickle :(
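
For context, a minimal sketch (not part of the commit) of how the extracted helper can be exercised on its own, e.g. against an already-unzipped 'Location History.json'. The module import path and the file path are assumptions; results are materialized inside the with block because _load_locations yields lazily.

# Sketch only: import path and file path below are assumptions, not from the commit.
from itertools import islice

from location import _load_locations  # hypothetical module name

with open('Location History.json') as fo:  # hypothetical, already-unzipped takeout export
    # _load_locations returns a lazy Iterator[Location]; consume it while
    # the file object is still open.
    sample = list(islice(_load_locations(fo), 10))

for loc in sample:
    print(loc)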