summaryrefslogtreecommitdiffstats
path: root/.bin/nwsflux
diff options
context:
space:
mode:
authorRomain Gonçalves <me@rgoncalves.se>2021-12-23 18:28:03 +0000
committerRomain Gonçalves <me@rgoncalves.se>2021-12-23 18:28:03 +0000
commit0f08d04698c814955116b6bae50752e64b774d8f (patch)
tree8cf9a33557093eebfd25aab2872e97639c7e2f62 /.bin/nwsflux
downloaddots-0f08d04698c814955116b6bae50752e64b774d8f.tar.gz
Thu Dec 23 06:28:03 PM UTC 2021
Diffstat (limited to '.bin/nwsflux')
-rwxr-xr-x.bin/nwsflux192
1 file changed, 192 insertions, 0 deletions
diff --git a/.bin/nwsflux b/.bin/nwsflux
new file mode 100755
index 0000000..3e725b0
--- /dev/null
+++ b/.bin/nwsflux
@@ -0,0 +1,192 @@
+#!/usr/bin/env python3
+#
+# nwsflux
+
+import miniflux
+import urllib3
+import logging
+import os
+import re
+import shlex
+import argparse
+
+
def get_local_feed(line: str) -> dict:
    """
    Parse one newsboat url line into a miniflux-API-shaped feed dict.

    Expected token layout is ``<url> [tag] [\"title\"]``: the first token
    is the feed url, the second the tag, the last the quoted title.
    A feed whose tag equals its title (i.e. no distinct tag was given)
    is filed under the fallback category 'all'.
    """
    # Tokenize once instead of re-splitting the line for every field.
    tokens = shlex.split(line)
    feed_url = tokens[0]
    title = tokens[-1]
    # A line holding only the url has no tag token; avoid an IndexError
    # and let the equality check below route it to the 'all' category.
    tag = tokens[1] if len(tokens) > 1 else title
    if tag == title:
        tag = 'all'

    return {
        'feed_url': feed_url,
        'title': title,
        'category': {
            'title': tag
        }
    }
+
+
def get_local_feeds(filename: str) -> list:
    """
    Load a newsboat url file and return its feeds as miniflux-style dicts.

    Only lines that start with a url scheme (http.., https..) are kept;
    comments and newsboat configuration lines are ignored.
    """
    url_pattern = re.compile('^http.*://')

    with open(filename, 'r') as handle:
        feed_lines = [line for line in handle if url_pattern.match(line)]

    return [get_local_feed(feed_line) for feed_line in feed_lines]
+
+
def delete_categories(client: miniflux.Client, local_cats: dict, remote_cats: dict):
    """
    Delete every remote category whose title no longer exists locally.

    . Useless for now: miniflux's API only returns non-empty categories,
    and those cannot be deleted.
    """
    # Build the lookup once; titles are plain strings so set membership
    # is equivalent to the pairwise equality scan.
    local_titles = {local['title'] for local in local_cats}

    for remote in remote_cats:
        if remote['title'] in local_titles:
            continue
        try:
            logging.info(f'remove category: {remote["title"]}')
            client.delete_category(remote['id'])
        # ignores categories that are empty on remote
        except miniflux.ClientError as e:
            logging.error('can not remove non-empty category:'
                          f'{remote["title"]} {e}')
+
+
def create_categories(client: miniflux.Client, local_cats: dict, remote_cats: dict):
    """
    Create every category that exists locally but is missing on remote.
    """
    # Remote titles collected once up front for O(1) membership tests.
    remote_titles = {remote['title'] for remote in remote_cats}

    for local in local_cats:
        if local['title'] in remote_titles:
            continue
        try:
            logging.info(f'create category: {local["title"]}')
            client.create_category(local['title'])
        # ignores categories that are empty on remote
        except miniflux.ClientError as e:
            logging.error(f'remote category empty: {local["title"]} {e}')
+
+
def sync_categories(client: miniflux.Client, local_feeds: dict, remote_feeds: dict) -> dict:
    """
    Reconcile remote categories with the local feed file.

    Deletes remote-only categories, creates local-only ones, then
    returns the refreshed category list from the server.
    """
    unique_local = get_uniq_categories(local_feeds)
    unique_remote = get_uniq_categories(remote_feeds)

    delete_categories(client, unique_local, unique_remote)
    create_categories(client, unique_local, unique_remote)
    return client.get_categories()
+
+
def get_uniq_categories(buffer: list) -> list:
    """
    Return the distinct 'category' dicts found in a list of feed dicts.

    Each category is reduced to a hashable frozenset of its items for
    de-duplication, then rebuilt as a dict (order is unspecified).
    """
    unique = {frozenset(feed['category'].items()) for feed in buffer}
    return [dict(entries) for entries in unique]
+
+
def delete_feeds(client: miniflux.Client, local_feeds: dict, remote_feeds: dict):
    """
    Delete every remote feed whose url is no longer in the local file.
    """
    # One pass over the local feeds to build the url lookup table.
    local_urls = {local['feed_url'] for local in local_feeds}

    for remote in remote_feeds:
        if remote['feed_url'] not in local_urls:
            logging.info(f'remove feed: {remote["feed_url"]}')
            client.delete_feed(remote['id'])
+
+
def create_feeds(client: miniflux.Client, local_feeds: dict, remote_feeds: dict):
    """
    Create every feed listed locally that the remote does not have yet.

    Each new feed is attached to the remote category matching its local
    category title (or no category when no match is found).
    """
    categories = client.get_categories()
    remote_urls = {remote['feed_url'] for remote in remote_feeds}

    for local in local_feeds:
        if local['feed_url'] in remote_urls:
            continue
        logging.info(f'create feed: {local}')
        # Resolve the local category title to a remote category id;
        # stays None when the remote has no category with that title.
        wanted_title = local['category']['title']
        category_id = None
        for category in categories:
            if category['title'] == wanted_title:
                category_id = category['id']
                break
        try:
            client.create_feed(local['feed_url'], category_id)
        except miniflux.ClientError as e:
            logging.error(e)
+
+
def sync_feeds(client: miniflux.Client, local_feeds: dict, remote_feeds: dict) -> dict:
    """
    Bring the remote feed list in line with the local one.

    Removes stale remote feeds, adds missing ones, then returns the
    refreshed remote feed list.
    """
    for reconcile in (delete_feeds, create_feeds):
        reconcile(client, local_feeds, remote_feeds)
    return client.get_feeds()
+
+
def parse():
    """
    Parse command-line arguments.

    Returns the populated argparse namespace: -c (url file), -k (API
    key) and -u (instance url) are mandatory; -d and -e are flags.
    """
    # Fixed: the original adjacent string literals were missing the
    # separating space, yielding "...fromnewsboat..." in --help output.
    parser = argparse.ArgumentParser(description='Synchronize RSS feeds from '
                                     'newsboat to a Miniflux instance.')

    parser.add_argument('-c', dest='config', type=str, required=True,
                        help='Newsboat url file')
    parser.add_argument('-k', dest='key', type=str, required=True,
                        help='Miniflux API key')
    parser.add_argument('-u', dest='url', type=str, required=True,
                        help='Miniflux url')
    parser.add_argument('-d', dest='debug', action='store_true',
                        help='Enable debugging output')
    parser.add_argument('-e', dest='export', action='store_true',
                        help='Export Miniflux OPML config to stdout')

    return parser.parse_args()
+
+
def main():
    """
    Entry point: synchronize newsboat feeds to a Miniflux instance.
    """
    # Disable SSL verification (e.g. self-hosted / self-signed
    # instances) and silence the resulting urllib3 warnings.
    os.environ["CURL_CA_BUNDLE"] = ""
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    # Arguments.
    args = parse()

    # Debugging. Fixed: the original assigned `logging.level`, which is
    # just a stray module attribute and never changed the log level, so
    # the -d flag had no effect. Pick the level from the flag instead:
    # INFO progress messages only when debugging is requested.
    logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING)

    # Synchronization.
    client = miniflux.Client(args.url, api_key=args.key)

    local_feeds = get_local_feeds(args.config)
    remote_feeds = client.get_feeds()

    sync_categories(client, local_feeds, remote_feeds)
    sync_feeds(client, local_feeds, remote_feeds)

    # Export Miniflux to OPML.
    if args.export:
        print(client.export_feeds())
+
+
# Run the synchronization only when executed as a script.
if __name__ == '__main__':
    main()
remember that computers suck.