Specify URLs in the config file instead of in the CLI.
4 files changed, 58 insertions(+), 22 deletions(-)

M README.rst
M silorider/commands/process.py
M silorider/commands/utils.py
M silorider/main.py
M README.rst +21 -7
@@ 27,31 27,45 @@ Right now, the following silos are suppo
 Quickstart
 ----------
 
-SiloRider will need to read a configuration file in `INI`_ format. The minimum requirement is to define at least one "silo" using a ``silo:<name>`` section::
+SiloRider will need to read a configuration file in `INI`_ format. The minimum
+requirement is to define at least one "silo" using a ``silo:<name>`` section,
+and to specify the URL of one of your personal websites::
 
     [silo:my_mastodon]
     type: mastodon
     url: https://mastodon.social
 
-This defines one Mastodon silo to which you want to post your entries.
+    [urls]
+    my_blog: https://your.website.com
+
+This defines one Mastodon silo to which you want to cross-post entries from
+your blog at ``your.website.com``.
 
 You can then run::
 
     silorider auth my_mastodon 
 
-This command will authenticate your Mastodon account and provide SiloRider with the permission to post to your timeline. The authorization tokens are stored in a cache file that defaults to ``silorider.db``, next to the configuration file. Later, this cache will also contain the list of entries already posted to each silo.
+This command will authenticate your Mastodon account and provide SiloRider with
+the permission to post to your timeline. The authorization tokens are stored in
+a cache file that defaults to ``silorider.db``, next to the configuration file.
+Later, this cache will also contain the list of entries already posted to each
+silo.
 
 Once authenticated, you can run::
 
-    silorider populate https://yourwebsite
+    silorider populate
 
-This will populate the cache with the existing entries, since you probably don't want the first run of SiloRider to cross-post your last dozen or so entries in one go.
+This will populate the cache with the existing entries, since you probably
+don't want the first run of SiloRider to cross-post your last dozen or so
+entries in one go.
 
 Later, when you post something new, you can then run::
 
-    silorider process https://yourwebsite
+    silorider process
 
-This will pick up the new entries and post them to Mastodon. You can run this command again regularly... if there's something new, SiloRider will cross-post it to the configured silos. If not, it will just exit.
+This will pick up the new entries and post them to Mastodon. You can run this
+command again regularly... if there's something new, SiloRider will cross-post
+it to the configured silos. If not, it will just exit.
 
 
 .. _POSSE: https://indieweb.org/POSSE
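
Note that the ``[urls]`` section can list more than one site. The new
``-u``/``--url`` option added to the ``process`` and ``populate`` commands
below then restricts a run to the named entries. A hypothetical example with a
second ``my_notes`` site::

    [urls]
    my_blog: https://your.website.com
    my_notes: https://notes.example.com

and a run limited to it::

    silorider process -u my_notes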

M silorider/commands/process.py +6 -4
@@ 1,5 1,5 @@ 
 import logging
-from .utils import get_named_silos
+from .utils import get_named_silos, get_named_urls
 from ..silos.base import SiloPostingContext
 from ..parse import parse_url
 

@@ 7,9 7,11 @@ from ..parse import parse_url
 logger = logging.getLogger(__name__)
 
 
-def process_url(url, ctx):
-    p = Processor(ctx, url)
-    p.process()
+def process_urls(ctx):
+    for url in get_named_urls(ctx.config, ctx.args.url):
+        logger.info("Processing %s" % url)
+        p = Processor(ctx, url)
+        p.process()
 
 
 class Processor:

M silorider/commands/utils.py +19 -1
@@ 5,6 5,17 @@ from ..parse import parse_mf2
 logger = logging.getLogger(__name__)
 
 
+def get_named_urls(config, names):
+    named_urls = []
+    if config.has_section('urls'):
+        named_urls = config.items('urls')
+    if not names:
+        return [url for (_, url) in named_urls]
+
+    return [url for (name, url) in named_urls
+            if name in names]
+
+
 def get_named_silos(silos, names):
     if not names:
         return silos
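
The new ``get_named_urls`` helper above returns every configured URL when no
names are given, and filters by entry name otherwise. A minimal standalone
sketch of that selection logic using ``configparser`` (the ``my_blog`` and
``my_notes`` entries are made up for illustration)::

    import configparser

    config = configparser.ConfigParser()
    config.read_string(
        "[urls]\n"
        "my_blog: https://your.website.com\n"
        "my_notes: https://notes.example.com\n")

    # No names passed (no -u flag): every configured URL, in order.
    print([url for _, url in config.items('urls')])

    # Names passed (e.g. from `-u my_blog`): only the matching entries.
    names = ['my_blog']
    print([url for name, url in config.items('urls') if name in names])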

@@ 21,7 32,14 @@ def get_named_silos(silos, names):
     return res
 
 
-def populate_cache(url, ctx):
+def populate_cache(ctx):
+    urls = get_named_urls(ctx.config, ctx.args.url)
+    for url in urls:
+        logger.info("Caching entries from %s" % url)
+        _populate_cache_for_url(url, ctx)
+
+
+def _populate_cache_for_url(url, ctx):
     import mf2util
     import dateutil.parser
 

M silorider/main.py +12 -10
@@ 42,14 42,13 @@ def _setup_auth(parser):
 
 def _setup_process(parser):
     def _run(ctx):
-        from .commands.process import process_url
-        for url in ctx.args.site_url:
-            process_url(url, ctx)
+        from .commands.process import process_urls
+        process_urls(ctx)
 
     parser.add_argument(
-        'site_url',
+        '-u', '--url',
         action='append',
-        help="URL of the website to read from.")
+        help="Only parse the given URL name(s).")
     parser.add_argument(
         '-s', '--silo',
         action='append',

@@ 68,17 67,16 @@ def _setup_process(parser):
 def _setup_populate(parser):
     def _run(ctx):
         from .commands.utils import populate_cache
-        for url in ctx.args.site_url:
-            populate_cache(url, ctx)
+        populate_cache(ctx)
 
     parser.add_argument(
-        'site_url',
+        '-u', '--url',
         action='append',
-        help="URL of the website to read from.")
+        help="Only populate from the given URL name(s).")
     parser.add_argument(
         '-s', '--silo',
         action='append',
-        help="Which silo to populate.")
+        help="Only populate the given silo(s).")
     parser.add_argument(
         '--until',
         help="The date until which to populate the cache (included).")
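
Both commands rely on argparse's ``action='append'`` semantics for the
repeated ``-u`` and ``-s`` flags: each occurrence appends to a list, and the
attribute stays ``None`` when the flag is never passed, which
``get_named_urls`` treats as "use every configured URL". A quick sketch::

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--url', action='append')

    print(parser.parse_args([]).url)                      # None
    print(parser.parse_args(['-u', 'my_blog']).url)       # ['my_blog']
    print(parser.parse_args(['-u', 'a', '-u', 'b']).url)  # ['a', 'b']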

@@ 160,6 158,10 @@ def _unsafe_main(args=None):
         logger.warning("No silos defined in the configuration file. "
                        "Nothing to do!")
         return
+    if not config.has_section('urls') or not config.items('urls'):
+        logger.warning("No URLs defined in the configuration file. "
+                       "Nothing to do!")
+        return
 
     logger.debug("Initializing cache.")
     from .cache.base import load_cache
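
One detail in the guard above: the order of the two checks matters, because
``config.items('urls')`` raises ``configparser.NoSectionError`` when the
section is absent, so the ``has_section`` test must short-circuit first. A
quick illustration::

    import configparser

    config = configparser.ConfigParser()  # no [urls] section at all

    # Safe: has_section() short-circuits before items() can raise.
    if not config.has_section('urls') or not config.items('urls'):
        print("No URLs defined")

    # Calling items() on the missing section directly would raise.
    try:
        config.items('urls')
    except configparser.NoSectionError:
        print("items() alone raises NoSectionError")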