f73275493167 draft — Malcolm tip 20 days ago
FIX: Switch to curl because Reddit started denying these requests; handle failure more gracefully
3 files changed, 16 insertions(+), 3 deletions(-)

M feed2maildir/converter.py
M feed2maildir/reader.py
M scripts/feed2maildir
M feed2maildir/converter.py +5 -2
@@ 109,7 109,7 @@ Content-Type: {}
         newtimes = {}
         for feedname, feed in feeds.items():
             self.output('Processing : {}'.format(feedname))
-            if feed is not None:
+            if feed is not None and feed.entries:
                 try: # to get the update time from the feed itself
                     feedup = self.mktime(feed.feed.updated)
                 except: # there is no info, then find it in the posts

          
@@ 153,7 153,10 @@ Content-Type: {}
                         db[feedname]).astimezone(
                             dateutil.tz.tzutc()).strftime('%Y-%m-%d %H:%M:%S %Z')
 
-            self.output('Feedname : {} : Feedup : {}'.format(feedname, newtimes[feedname]))
+            if feedname in newtimes:
+                self.output('Feedname : {} : Feedup : {}'.format(feedname, newtimes[feedname]))
+            else:
+                self.output('Feedname : {} : Not Updated'.format(feedname))
 
         return (new, newtimes)
 

          
M feed2maildir/reader.py +4 -1
@@ 1,3 1,5 @@ 
+import subprocess
+
 import feedparser
 
 class Reader:

          
@@ 7,7 9,8 @@ class Reader:
         self.feeds = {}
         self.silent = silent
         for feed in feeds:
-            f = feedparser.parse(feeds[feed])
+            subprocess.check_call(['curl', '-s', '-L', '-o', '/tmp/feed2maildir.xml', feeds[feed]])
+            f = feedparser.parse(open('/tmp/feed2maildir.xml', 'rb'))
             if f.bozo:
                 self.output('WARNING: could not parse feed {}'.format(feed))
                 self.output('WARNING: {}'.format(f.bozo_exception))

          
M scripts/feed2maildir +7 -0
@@ 22,6 22,8 @@ def main():
                         help='strip HTML from the feed content')
     parser.add_argument('-l', action='store_true',
                         help='just write the links without the content')
+    parser.add_argument('-f', metavar='<feed>',
+                        help='Limit to a specific feed')
     args = vars(parser.parse_args())
 
     if args['c']:

          
@@ 29,6 31,10 @@ def main():
     else:
         loader = Loader()
     config = loader.config
+
+    if args['f']:
+        config['feeds'] = {args['f']: config['feeds'][args['f']]}
+
     reader = Reader(config['feeds'])
 
     if args['d']:

          
@@ 47,6 53,7 @@ def main():
     converter.load(reader.feeds)
     converter.run()
 
+
 if __name__ == '__main__':
     main()