74 lines
2.4 KiB
Python
74 lines
2.4 KiB
Python
#!/usr/bin/env python3
|
|
import datetime
|
|
import os.path
|
|
import time
|
|
import urllib.parse
|
|
|
|
import feedparser
|
|
import gusmobile
|
|
|
|
def load_feed_urls(filename="feeds.txt"):
    """Read feed URLs from `filename`, one per line.

    Blank lines and lines beginning with '#' are treated as comments
    and skipped.  Returns a list of URL strings in file order.
    """
    with open(filename, "r") as fp:
        candidates = (raw.strip() for raw in fp)
        return [url for url in candidates if url and not url.startswith("#")]
|
|
|
|
def add_port_to_url(url, default_port=1965):
    """Return `url` with an explicit port appended when it has none.

    Gemini's default port is 1965; this works around a gusmobile bug
    by making the port explicit in the URL.  `default_port` is exposed
    as a parameter (default 1965) so non-standard servers can be
    handled too — existing callers are unaffected.

    NOTE(review): the `":" in netloc` test would misfire on a
    bracketless IPv6 netloc, but feed URLs here are hostnames, so
    this is acceptable.
    """
    parts = urllib.parse.urlsplit(url)
    if ":" not in parts.netloc:
        parts = parts._replace(netloc="{}:{}".format(parts.netloc, default_port))
    return urllib.parse.urlunsplit(parts)
|
|
|
|
def items_from_feed_string(feed_str):
    """Parse a feed document and flatten its entries into tuples.

    Each entry becomes (updated_parsed, link, entry title, feed title);
    the timestamp comes first so sorting the tuples sorts by time.
    """
    parsed = feedparser.parse(feed_str)
    items = []
    for entry in parsed.entries:
        items.append((entry.updated_parsed, entry.link, entry.title, parsed.feed.title))
    return items
|
|
|
|
def format_aggregated(items, filename, n_feeds):
    """Write the aggregated feed items to `filename` as a Gemini page.

    `items` must be (struct_time, link, entry_title, feed_title)
    tuples already sorted newest-first.  Optional header.gmi and
    footer.gmi files in the working directory are spliced in verbatim;
    otherwise a default title is used and the footer is omitted.
    """
    with open(filename, "w") as out:
        # Header: custom file if present, default title otherwise.
        if os.path.exists("header.gmi"):
            with open("header.gmi", "r") as header:
                out.write(header.read())
        else:
            out.write("# CAPCOM Gemini feed aggregator\n\n")
        # Feed count
        out.write("Aggregating {} Atom feeds from Geminispace.\n".format(n_feeds))
        # Entries, grouped under one "## YYYY-MM-DD" heading per day.
        previous_day = (0, 0)
        for updated, link, entry_title, feed_title in items:
            this_day = (updated.tm_year, updated.tm_yday)
            if this_day != previous_day:
                previous_day = this_day
                stamp = datetime.datetime.fromtimestamp(time.mktime(updated))
                out.write("\n## " + stamp.strftime("%Y-%m-%d") + "\n\n")
            out.write("=> {} {} - {}\n".format(link, feed_title, entry_title))
        out.write("\n")
        # Footer: only when a custom file exists.
        if os.path.exists("footer.gmi"):
            with open("footer.gmi", "r") as footer:
                out.write(footer.read())
|
|
|
|
def aggregate(feed_file="feeds.txt", output_file="index.gmi"):
    """Fetch every feed listed in `feed_file` and write the newest
    64 entries, sorted newest-first, to `output_file`.

    Feeds that fail to fetch or that return a non-success status are
    skipped so a single dead capsule cannot abort the whole run.
    """
    feed_urls = load_feed_urls(feed_file)
    items = []
    for feed_url in feed_urls:
        print("Fetching ", feed_url)
        feed_url = add_port_to_url(feed_url)
        resp = gusmobile.fetch(feed_url)
        # Bug fix: fetch() yields None on connection failure, in which
        # case the old `resp.status` access raised AttributeError and
        # killed the whole aggregation run.
        if resp and resp.status == "20":
            items.extend(items_from_feed_string(resp.content))
    # Tuples compare timestamp-first, so this orders newest-first.
    items.sort(reverse=True)
    items = items[0:64]
    format_aggregated(items, output_file, len(feed_urls))
|
|
|
|
def main():
    """Entry point: build index.gmi from the default feed list."""
    aggregate(feed_file="feeds.txt", output_file="index.gmi")
|
|
|
|
# Run the aggregator when executed as a script; importing the module
# has no side effects.
if __name__ == "__main__":
    main()
|