initial rss

This commit is contained in:
Dima Gerasimov 2019-09-30 19:50:58 +01:00
parent bf8ceea523
commit 35b6d6ff97
5 changed files with 109 additions and 0 deletions

12
my/_rss.py Normal file
View file

@@ -0,0 +1,12 @@
# shared Rss stuff
from typing import NamedTuple
class Subscription(NamedTuple):
    """A single feed subscription record, shared by the feedbin/feedly modules."""
    # TODO date?
    title: str
    url: str
    id: str
    subscribed: bool = True

View file

@@ -54,3 +54,25 @@ R = TypeVar('R')
def cproperty(f: Callable[[Cl], R]) -> R:
    """Cached property: the wrapped method is computed once and memoized.

    NOTE(review): `lru_cache` keyed on the bound method holds a reference to
    the instance for the cache's lifetime (ruff B019); on Python 3.8+
    `functools.cached_property` may be preferable — confirm intent.
    """
    return property(functools.lru_cache(maxsize=1)(f))  # type: ignore
# https://stackoverflow.com/a/12377059/706389
def listify(fn=None, wrapper=list):
    """
    Wraps a function's return value in wrapper (e.g. list)
    Useful when an algorithm can be expressed more cleanly as a generator

    Usable both bare (``@listify``) and parameterized
    (``@listify(wrapper=tuple)``).
    """
    def listify_return(fn):
        @functools.wraps(fn)
        def listify_helper(*args, **kw):
            return wrapper(fn(*args, **kw))
        return listify_helper
    if fn is None:
        # called as @listify(wrapper=...): return the configured decorator
        return listify_return
    # called bare as @listify: decorate fn directly
    return listify_return(fn)

31
my/feedbin.py Normal file
View file

@@ -0,0 +1,31 @@
from . import paths
from .common import listify
from ._rss import Subscription
import json
from pathlib import Path
from typing import Dict, List
from datetime import datetime
from dateutil.parser import isoparse
@listify
def parse_file(f: Path):
    """Parse a feedbin subscriptions JSON export into Subscription objects.

    Returns a list (via @listify) of Subscription, one per entry in the file.
    """
    raw = json.loads(f.read_text())
    # removed leftover debug print(raw) — it spammed stdout on every parse
    for r in raw:
        yield Subscription(
            # TODO created_at?
            title=r['title'],
            url=r['site_url'],
            id=r['id'],
        )
def get_states() -> Dict[datetime, List[Subscription]]:
    """Map each feedbin export's timestamp to the subscriptions it contains."""
    states: Dict[datetime, List[Subscription]] = {}
    export_root = Path(paths.feedbin.export_dir)
    for export in sorted(export_root.glob('*.json')):
        # the filename stem ends with an ISO timestamp after the last underscore
        stamp = export.stem.split('_')[-1]
        states[isoparse(stamp)] = parse_file(export)
    return states

32
my/feedly.py Normal file
View file

@@ -0,0 +1,32 @@
from . import paths
from .common import listify
from ._rss import Subscription
import json
from pathlib import Path
from typing import Dict, List
from datetime import datetime
from dateutil.parser import isoparse
@listify
def parse_file(f: Path):
    """Parse a feedly subscriptions JSON export into Subscription objects.

    Returns a list (via @listify) of Subscription, one per entry in the file.
    """
    for entry in json.loads(f.read_text()):
        # TODO created_at?
        # err, some even don't have website..
        # NOTE(review): missing 'website' yields url=None despite url: str — confirm
        yield Subscription(
            title=entry['title'],
            url=entry.get('website'),
            id=entry['id'],
        )
def get_states() -> Dict[datetime, List[Subscription]]:
    """Map each feedly export's timestamp to the subscriptions it contains."""
    states: Dict[datetime, List[Subscription]] = {}
    export_root = Path(paths.feedly.export_dir)
    for export in sorted(export_root.glob('*.json')):
        # the filename stem ends with a %Y%m%d%H%M%S stamp after the last underscore
        stamp = export.stem.split('_')[-1]
        states[datetime.strptime(stamp, '%Y%m%d%H%M%S')] = parse_file(export)
    return states

12
my/rss.py Normal file
View file

@@ -0,0 +1,12 @@
from . import feedbin
from . import feedly
from ._rss import Subscription
# TODO google reader?
def get_history():
    """
    It's useful to keep track of websites you unsubscribed from too,
    so you don't try to subscribe again (or at least take into account why you unsubscribed before)
    """
    # TODO(review): stub — not implemented yet; presumably it will merge the
    # feedbin/feedly get_states() snapshots into one timeline. Returns None.
    pass