scan-feeds: Import feeds into the same file.

This allows me to accumulate feeds over time in case someone's blog is
down at any given moment.
Nelson Elhage 2010-01-19 10:52:29 -05:00
parent 11dec64ca0
commit ab6966baf1
1 changed file with 12 additions and 10 deletions
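The pattern the commit introduces: load the previous run's report if one exists, append only posts whose URLs haven't been seen, and write the merged result back out. A minimal sketch of that flow (Python 3 syntax; the helper names are mine, only 'out/report.yml' and the week-list layout come from the diff below):

    import yaml

    def load_report(path='out/report.yml'):
        # Start from the previous run's report when it exists, so posts
        # gathered earlier survive even if a feed is unreachable today.
        try:
            with open(path) as f:
                return yaml.safe_load(f.read()) or {}
        except IOError:
            return {}

    def add_post(weeks, wn, post):
        # Grow the week list as needed, then record the post only if
        # its URL is not already present in that week.
        while len(weeks) <= wn:
            weeks.append([])
        if post['url'] not in [p['url'] for p in weeks[wn]]:
            weeks[wn].append(post)

    def save_report(log, path='out/report.yml'):
        # Dump the merged report back to the same file for the next run.
        with open(path, 'w') as f:
            yaml.safe_dump(log, f)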


@@ -9,7 +9,11 @@ import dateutil.tz as tz
 
 with open('bloggers.yml') as f:
     users = yaml.safe_load(f.read())
 
-log = {}
+try:
+    with open('out/report.yml') as f:
+        log = yaml.safe_load(f.read())
+except IOError:
+    log = {}
 
 START = datetime.datetime(2009, 12, 21, 6)
@@ -37,19 +41,17 @@ def parse_feeds(weeks, uri):
         while len(weeks) <= wn:
             weeks.append([])
-        weeks[wn].append(dict(
-            date=date,
-            title=post.title,
-            url=get_link(post)))
+        post = dict(date=date,
+                    title=post.title,
+                    url=get_link(post))
+        if post['url'] not in [p['url'] for p in weeks[wn]]:
+            weeks[wn].append(post)
 
 for (username, u) in users.items():
-    weeks = []
     print "[%s]" % (username)
+    weeks = log.setdefault(username, [])
     for l in u['links']:
         parse_feeds(weeks, l[2])
-    log[username] = weeks
     for (i, w) in enumerate(weeks):
         print " [%d]: %s" % (i, w)
 
 with open('out/report.yml', 'w') as f:
     yaml.safe_dump(log, f)
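Replacing weeks = [] and the later log[username] = weeks with log.setdefault(username, []) is what makes runs cumulative: setdefault returns the existing week list when the user already has one (inserting the empty list otherwise), so parse_feeds appends into the accumulated history in place. A small illustration with made-up values:

    log = {'alice': [['http://a.example/1']]}

    # Existing key: setdefault returns the stored list, mutated in place.
    weeks = log.setdefault('alice', [])
    weeks.append(['http://a.example/2'])

    # Missing key: inserts [] and returns it, so new users start empty.
    log.setdefault('bob', [])

    print(log)
    # {'alice': [['http://a.example/1'], ['http://a.example/2']], 'bob': []}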