merge all rss

Dima Gerasimov 2019-09-30 20:09:40 +01:00
parent 35b6d6ff97
commit 420d444633
4 changed files with 26 additions and 10 deletions

View file

@@ -8,5 +8,3 @@ class Subscription(NamedTuple):
     id: str
     subscribed: bool=True
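The hunk above only shows the tail of the Subscription definition. Judging from the fields parse_file fills in further down (title, url, id, plus the subscribed default here), the full type presumably looks roughly like this sketch; exact field order and types are assumptions:

    from typing import NamedTuple

    class Subscription(NamedTuple):
        title: str
        url: str
        id: str
        subscribed: bool = True  # set to False when a feed no longer appears in the latest export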

View file

@@ -12,7 +12,6 @@ from dateutil.parser import isoparse
 @listify
 def parse_file(f: Path):
     raw = json.loads(f.read_text())
-    print(raw)
     for r in raw:
         yield Subscription(
             # TODO created_at?
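The @listify decorator itself isn't part of this diff; since get_states() stores parse_file's result straight into a Dict[datetime, List[Subscription]], it presumably collects a generator's yields into a list. A minimal sketch of a decorator with that behaviour (the name and details are assumptions, not the project's actual helper):

    from functools import wraps

    def listify(fn):
        # wrap a generator function so calling it returns a list instead of a generator
        @wraps(fn)
        def wrapper(*args, **kwargs):
            return list(fn(*args, **kwargs))
        return wrapper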

View file

@@ -6,7 +6,7 @@ import json
 from pathlib import Path
 from typing import Dict, List
 from datetime import datetime
-from dateutil.parser import isoparse
+import pytz
 
 
 @listify
@@ -14,12 +14,13 @@ def parse_file(f: Path):
     raw = json.loads(f.read_text())
     for r in raw:
         # err, some even don't have website..
-        website = r.get('website')
+        rid = r['id']
+        website = r.get('website', rid) # meh
         yield Subscription(
             # TODO created_at?
             title=r['title'],
             url=website,
-            id=r['id'],
+            id=rid,
         )
 
 def get_states() -> Dict[datetime, List[Subscription]]:
@@ -27,6 +28,7 @@ def get_states() -> Dict[datetime, List[Subscription]]:
     for f in sorted(Path(paths.feedly.export_dir).glob('*.json')):
         dts = f.stem.split('_')[-1]
         dt = datetime.strptime(dts, '%Y%m%d%H%M%S')
+        dt = pytz.utc.localize(dt)
         subs = parse_file(f)
         res[dt] = subs
     return res
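The added pytz.utc.localize call turns the naive datetime produced by strptime into a timezone-aware UTC one, so the Dict[datetime, ...] keys compare and sort unambiguously. A small standalone illustration; the timestamp string is a made-up example of the export filename suffix:

    from datetime import datetime, timezone
    import pytz

    dts = '20190930180000'                        # hypothetical f.stem suffix
    dt = datetime.strptime(dts, '%Y%m%d%H%M%S')   # naive datetime
    dt = pytz.utc.localize(dt)                    # timezone-aware, UTC
    # for UTC specifically this is equivalent to attaching the offset directly:
    assert dt == datetime(2019, 9, 30, 18, 0, 0, tzinfo=timezone.utc)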

View file

@@ -1,12 +1,29 @@
+from itertools import chain
+from typing import List
+from ._rss import Subscription
 from . import feedbin
 from . import feedly
-from ._rss import Subscription
 
 # TODO google reader?
 
-def get_history():
+def get_all_subscriptions() -> List[Subscription]:
     """
-    It's useful to keep track of websites you unsubscribed from too,
+    Keeps track of everything I ever subscribed to. It's useful to keep track of unsubscribed too
     so you don't try to subscribe again (or at least take into account why you unsubscribed before)
     """
-    pass
+    states = {}
+    states.update(feedly.get_states())
+    states.update(feedbin.get_states())
+
+    by_url = {}
+    for d, feeds in sorted(states.items()):
+        for f in feeds:
+            if f.url not in by_url:
+                by_url[f.url] = f
+
+    res = []
+    last = {x.url: x for x in max(states.items())[1]}
+    for u, x in sorted(by_url.items()):
+        present = u in last
+        res.append(x._replace(subscribed=present))
+    return res
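After the merge, every feed url that ever appeared in a feedly or feedbin export is returned exactly once, with subscribed set by whether it still shows up in the most recent snapshot (max(states.items())[1] is the subscription list keyed by the latest timestamp). A hypothetical usage sketch; the import path is an assumption, since the module's filename isn't shown in this diff:

    # module path is assumed for illustration only
    from my.rss import get_all_subscriptions

    subs = get_all_subscriptions()
    current = [s for s in subs if s.subscribed]
    gone    = [s for s in subs if not s.subscribed]
    print(f'{len(current)} feeds currently subscribed, {len(gone)} unsubscribed at some point')
    for s in gone:
        print('unsubscribed:', s.title, s.url)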