silorider changeset 15:cb1dc5c864d8 (tag: 0.2.0)
Specify URLs in the config file instead of in the CLI.
author | Ludovic Chabant <ludovic@chabant.com> |
---|---|
date | Sun, 29 Jul 2018 23:59:39 -0700 |
parents | c6bae5dcd8d7 |
children | 025e6a5bb5a3 |
files | README.rst silorider/commands/process.py silorider/commands/utils.py silorider/main.py |
diffstat | 4 files changed, 58 insertions(+), 22 deletions(-) |
```diff
--- a/README.rst	Sun Jul 29 23:51:50 2018 -0700
+++ b/README.rst	Sun Jul 29 23:59:39 2018 -0700
@@ -27,31 +27,45 @@
 Quickstart
 ----------
 
-SiloRider will need to read a configuration file in `INI`_ format. The minimum requirement is to define at least one "silo" using a ``silo:<name>`` section::
+SiloRider will need to read a configuration file in `INI`_ format. The minimum
+requirement is to define at least one "silo" using a ``silo:<name>`` section,
+and to specify the url to one of your personal websites::
 
     [silo:my_mastodon]
     type: mastodon
     url: https://mastodon.social
 
-This defines one Mastodon silo to which you want to post your entries.
+    [urls]
+    my_blog: https://your.website.com
+
+This defines one Mastodon silo to which you want to cross-post entries from
+your blog at ``your.website.com``.
 
 You can then run::
 
     silorider auth my_mastodon
 
-This command will authenticate your Mastodon account and provide SiloRider with the permission to post to your timeline. The authorization tokens are stored in a cache file that defaults to ``silorider.db``, next to the configuration file. Later, this cache will also contain the list of entries already posted to each silo.
+This command will authenticate your Mastodon account and provide SiloRider with
+the permission to post to your timeline. The authorization tokens are stored in
+a cache file that defaults to ``silorider.db``, next to the configuration file.
+Later, this cache will also contain the list of entries already posted to each
+silo.
 
 Once authenticated, you can run::
 
-    silorider populate https://yourwebsite
+    silorider populate
 
-This will populate the cache with the existing entries, since you probably don't want the first run of SiloRider to cross-post your last dozen or so entries in one go.
+This will populate the cache with the existing entries, since you probably
+don't want the first run of SiloRider to cross-post your last dozen or so
+entries in one go.
 
 Later, when you post something new, you can then run::
 
-    silorider process https://yourwebsite
+    silorider process
 
-This will pick up the new entries and post them to Mastodon. You can run this command again regularly... if there's something new, SiloRider will cross-post it to the configured silos. If not, it will just exit.
+This will pick up the new entries and post them to Mastodon. You can run this
+command again regularly... if there's something new, SiloRider will cross-post
+it to the configured silos. If not, it will just exit.
 
 .. _POSSE: https://indieweb.org/POSSE
```
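For context, the `[urls]` entries added to the README are plain INI data. Below is a minimal sketch (not part of this changeset) of reading that section with Python's standard `configparser`, which provides the `has_section`/`items` calls the new `get_named_urls` helper below relies on; the config text simply repeats the README example.

```python
# Sketch only: parse the README's example configuration in memory and
# list the personal sites defined under [urls]. The config text mirrors
# the README; nothing here is silorider API except the section layout.
import configparser

config = configparser.ConfigParser()
config.read_string("""
[silo:my_mastodon]
type: mastodon
url: https://mastodon.social

[urls]
my_blog: https://your.website.com
""")

# With no -u/--url names given, every URL in [urls] would be processed.
for name, url in config.items('urls'):
    print(name, "->", url)   # my_blog -> https://your.website.com
```

This is the same data the CLI used to take as a positional `site_url` argument before this change.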
```diff
--- a/silorider/commands/process.py	Sun Jul 29 23:51:50 2018 -0700
+++ b/silorider/commands/process.py	Sun Jul 29 23:59:39 2018 -0700
@@ -1,5 +1,5 @@
 import logging
-from .utils import get_named_silos
+from .utils import get_named_silos, get_named_urls
 from ..silos.base import SiloPostingContext
 from ..parse import parse_url
 
@@ -7,9 +7,11 @@
 logger = logging.getLogger(__name__)
 
 
-def process_url(url, ctx):
-    p = Processor(ctx, url)
-    p.process()
+def process_urls(ctx):
+    for url in get_named_urls(ctx.config, ctx.args.url):
+        logger.info("Processing %s" % url)
+        p = Processor(ctx, url)
+        p.process()
 
 
 class Processor:
```
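The new `process_urls` entry point takes everything it needs from the context object instead of from positional CLI arguments. Here is a hypothetical sketch of the minimal contract it implies, using stand-in objects that are not part of silorider.

```python
# Illustration only: the attributes process_urls(ctx) reads, per the
# diff above, are ctx.config (the parsed INI file) and ctx.args.url
# (the list built by the -u/--url flag, or None when it is omitted).
import configparser
from types import SimpleNamespace

config = configparser.ConfigParser()
config.read_string("[urls]\nmy_blog: https://your.website.com\n")

# Stand-in for silorider's execution context; the real one also carries
# the cache and silo objects that Processor needs.
ctx = SimpleNamespace(config=config,
                      args=SimpleNamespace(url=None))  # None means "all URLs"
```

Resolving the URL list inside the command modules, rather than in `main.py`, is what lets both `process` and `populate` share the same `-u/--url` filtering.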
```diff
--- a/silorider/commands/utils.py	Sun Jul 29 23:51:50 2018 -0700
+++ b/silorider/commands/utils.py	Sun Jul 29 23:59:39 2018 -0700
@@ -5,6 +5,17 @@
 logger = logging.getLogger(__name__)
 
 
+def get_named_urls(config, names):
+    named_urls = None
+    if config.has_section('urls'):
+        named_urls = config.items('urls')
+    if not names:
+        return [url for (_, url) in named_urls]
+
+    return [url for (name, url) in named_urls
+            if name in names]
+
+
 def get_named_silos(silos, names):
     if not names:
         return silos
@@ -21,7 +32,14 @@
     return res
 
 
-def populate_cache(url, ctx):
+def populate_cache(ctx):
+    urls = get_named_urls(ctx.config, ctx.args.url)
+    for url in urls:
+        logger.info("Caching entries from %s" % url)
+        _populate_cache_for_url(url, ctx)
+
+
+def _populate_cache_for_url(url, ctx):
     import mf2util
     import dateutil.parser
 
```
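The filtering rules of `get_named_urls` are easy to check interactively. Below is a small sketch, assuming this revision of silorider is importable; the second `my_photos` entry is made up for the example.

```python
import configparser

# Assumes silorider (at this revision) is on the Python path.
from silorider.commands.utils import get_named_urls

config = configparser.ConfigParser()
config.read_string("""
[urls]
my_blog: https://your.website.com
my_photos: https://photos.example.com
""")

# No names: every configured URL is returned.
print(get_named_urls(config, None))
# ['https://your.website.com', 'https://photos.example.com']

# With names: only the matching entries are returned.
print(get_named_urls(config, ['my_photos']))
# ['https://photos.example.com']
```

Note that `named_urls` stays `None` when the file has no `[urls]` section; the new guard added to `main.py` below warns and exits before that case can reach this helper.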
```diff
--- a/silorider/main.py	Sun Jul 29 23:51:50 2018 -0700
+++ b/silorider/main.py	Sun Jul 29 23:59:39 2018 -0700
@@ -42,14 +42,13 @@
 
 def _setup_process(parser):
     def _run(ctx):
-        from .commands.process import process_url
-        for url in ctx.args.site_url:
-            process_url(url, ctx)
+        from .commands.process import process_urls
+        process_urls(ctx)
 
     parser.add_argument(
-        'site_url',
+        '-u', '--url',
         action='append',
-        help="URL of the website to read from.")
+        help="Only parse the given URL name(s).")
     parser.add_argument(
         '-s', '--silo',
         action='append',
@@ -68,17 +67,16 @@
 def _setup_populate(parser):
     def _run(ctx):
         from .commands.utils import populate_cache
-        for url in ctx.args.site_url:
-            populate_cache(url, ctx)
+        populate_cache(ctx)
 
     parser.add_argument(
-        'site_url',
+        '-u', '--url',
         action='append',
-        help="URL of the website to read from.")
+        help="Only populate from the given URL name(s).")
     parser.add_argument(
         '-s', '--silo',
         action='append',
-        help="Which silo to populate.")
+        help="Only populate the given silo(s).")
     parser.add_argument(
         '--until',
         help="The date until which to populate the cache (included).")
@@ -160,6 +158,10 @@
         logger.warning("No silos defined in the configuration file. "
                        "Nothing to do!")
         return
+    if not config.has_section('urls') or not config.items('urls'):
+        logger.warning("No URLs defined in the configuration file. "
+                       "Nothing to do!")
+        return
 
     logger.debug("Initializing cache.")
     from .cache.base import load_cache
```
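The positional `site_url` argument becomes a repeatable `-u/--url` option naming entries from the `[urls]` section. Here is a standalone sketch of how `action='append'` behaves, independent of silorider; `my_photos` is again a made-up URL name.

```python
import argparse

# Mirrors the flag definition from the diff above.
parser = argparse.ArgumentParser(prog='silorider process')
parser.add_argument('-u', '--url', action='append',
                    help="Only parse the given URL name(s).")

# Each -u occurrence is appended to a list of URL names.
print(parser.parse_args(['-u', 'my_blog', '-u', 'my_photos']).url)
# ['my_blog', 'my_photos']

# When the flag is omitted, the attribute is None, which makes
# get_named_urls() return every configured URL.
print(parser.parse_args([]).url)
# None
```

So `silorider process` with no flags covers every site listed in `[urls]`, while `silorider process -u my_blog` restricts the run to that one entry.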