changeset 205:e725af1d48fb

bake: Changes in how assets directories are configured. Change `skip_patterns` and `force_patterns` to `ignore` and `force`. Put less responsibility on the `bake` command to specify all those settings, and more on the `Baker` and `ProcessorPipeline` themselves. Add some tests.
author Ludovic Chabant <ludovic@chabant.com>
date Sun, 18 Jan 2015 12:12:57 -0800
parents f98451237371
children cba781477bd0
files piecrust/baking/baker.py piecrust/commands/builtin/baking.py piecrust/processing/base.py piecrust/serving.py tests/test_processing_base.py
diffstat 5 files changed, 57 insertions(+), 75 deletions(-) [+]
line wrap: on
line diff
--- a/piecrust/baking/baker.py	Sun Jan 18 11:53:18 2015 -0800
+++ b/piecrust/baking/baker.py	Sun Jan 18 12:12:57 2015 -0800
@@ -18,14 +18,12 @@
 
 
 class Baker(object):
-    def __init__(self, app, out_dir, force=False,
-                 no_assets=False, num_workers=4):
+    def __init__(self, app, out_dir, force=False):
         assert app and out_dir
         self.app = app
         self.out_dir = out_dir
         self.force = force
-        self.no_assets = no_assets
-        self.num_workers = num_workers
+        self.num_workers = app.config.get('baker/workers', 4)
 
         # Remember what taxonomy pages we should skip
         # (we'll bake them repeatedly later with each taxonomy term)
--- a/piecrust/commands/builtin/baking.py	Sun Jan 18 11:53:18 2015 -0800
+++ b/piecrust/commands/builtin/baking.py	Sun Jan 18 12:12:57 2015 -0800
@@ -61,29 +61,15 @@
             return 1
 
     def _bakeSources(self, ctx, out_dir):
-        num_workers = ctx.app.config.get('baker/workers') or 4
         baker = Baker(
                 ctx.app, out_dir,
-                force=ctx.args.force,
-                no_assets=ctx.args.no_assets,
-                num_workers=num_workers)
+                force=ctx.args.force)
         baker.bake()
 
     def _bakeAssets(self, ctx, out_dir):
-        baker_params = ctx.app.config.get('baker') or {}
-        mounts = (baker_params.get('assets_dirs') or
-                  ctx.app.assets_dirs)
-        skip_patterns = (baker_params.get('ignore') or
-                         baker_params.get('skip_patterns'))
-        force_patterns = (baker_params.get('force') or
-                          baker_params.get('force_patterns'))
-        num_workers = ctx.app.config.get('baker/workers') or 4
         proc = ProcessorPipeline(
-                ctx.app, mounts, out_dir,
-                force=ctx.args.force,
-                skip_patterns=skip_patterns,
-                force_patterns=force_patterns,
-                num_workers=num_workers)
+                ctx.app, out_dir,
+                force=ctx.args.force)
         proc.run()
 
 
--- a/piecrust/processing/base.py	Sun Jan 18 11:53:18 2015 -0800
+++ b/piecrust/processing/base.py	Sun Jan 18 12:12:57 2015 -0800
@@ -113,45 +113,51 @@
 
 
 class ProcessorPipeline(object):
-    def __init__(self, app, mounts, out_dir, force=False,
-            skip_patterns=None, force_patterns=None, num_workers=4):
+    def __init__(self, app, out_dir, force=False):
         assert app and out_dir
         self.app = app
-        self.mounts = mounts
+        self.out_dir = out_dir
+        self.force = force
+
         tmp_dir = app.cache_dir
         if not tmp_dir:
             import tempfile
             tmp_dir = os.path.join(tempfile.gettempdir(), 'piecrust')
         self.tmp_dir = os.path.join(tmp_dir, 'proc')
-        self.out_dir = out_dir
-        self.force = force
-        self.skip_patterns = skip_patterns or []
-        self.force_patterns = force_patterns or []
-        self.processors = app.plugin_loader.getProcessors()
-        self.num_workers = num_workers
+
+        baker_params = app.config.get('baker') or {}
+
+        assets_dirs = baker_params.get('assets_dirs', app.assets_dirs)
+        self.mounts = make_mount_info(assets_dirs)
 
-        self.mounts = make_mount_info(self.mounts)
+        self.num_workers = baker_params.get('workers', 4)
 
-        self.skip_patterns += ['_cache', '_counter',
+        ignores = baker_params.get('ignore', [])
+        ignores += [
+                '_cache', '_counter',
                 'theme_info.yml',
                 '.DS_Store', 'Thumbs.db',
                 '.git*', '.hg*', '.svn']
+        self.skip_patterns = make_re(ignores)
+        self.force_patterns = make_re(baker_params.get('force', []))
 
-        self.skip_patterns = make_re(self.skip_patterns)
-        self.force_patterns = make_re(self.force_patterns)
+        self.processors = app.plugin_loader.getProcessors()
 
     def addSkipPatterns(self, patterns):
         self.skip_patterns += make_re(patterns)
 
     def filterProcessors(self, authorized_names):
-        if not authorized_names or authorized_names == '*':
+        self.processors = self.getFilteredProcessors(authorized_names)
+
+    def getFilteredProcessors(self, authorized_names):
+        if not authorized_names or authorized_names == 'all':
             return self.processors
 
         if isinstance(authorized_names, str):
             authorized_names = split_processor_names_re.split(authorized_names)
 
         procs = []
-        has_star = '*' in authorized_names
+        has_star = 'all' in authorized_names
         for p in self.processors:
             for name in authorized_names:
                 if name == p.PROCESSOR_NAME:
@@ -164,10 +170,6 @@
                     procs.append(p)
         return procs
 
-        return list(filter(
-            lambda p: p.PROCESSOR_NAME in authorized_names,
-            self.processors))
-
     def run(self, src_dir_or_file=None, *,
             new_only=False, delete=True,
             previous_record=None, save_record=True):
@@ -346,7 +348,8 @@
                     '%s [not baked, overridden]' % rel_path))
             return
 
-        processors = pipeline.filterProcessors(job.mount_info['processors'])
+        processors = pipeline.getFilteredProcessors(
+                job.mount_info['processors'])
         try:
             builder = ProcessingTreeBuilder(processors)
             tree_root = builder.build(rel_path)
@@ -390,7 +393,7 @@
         if not isinstance(info, dict):
             raise Exception("Asset directory info for '%s' is not a "
                             "dictionary." % name)
-        info.setdefault('processors', '*')
+        info.setdefault('processors', 'all -uglifyjs -cleancss')
 
     return mounts
 
--- a/piecrust/serving.py	Sun Jan 18 11:53:18 2015 -0800
+++ b/piecrust/serving.py	Sun Jan 18 12:12:57 2015 -0800
@@ -63,8 +63,6 @@
         self.static_preview = static_preview
         self.synchronous_asset_pipeline = synchronous_asset_pipeline
         self._out_dir = None
-        self._skip_patterns = None
-        self._force_patterns = None
         self._asset_record = None
         self._page_record = None
         self._mimetype_map = load_mimetype_map()
@@ -73,14 +71,8 @@
         # Bake all the assets so we know what we have, and so we can serve
         # them to the client. We need a temp app for this.
         app = PieCrust(root_dir=self.root_dir, debug=self.debug)
-        mounts = app.assets_dirs
         self._out_dir = os.path.join(app.cache_dir, 'server')
-        self._skip_patterns = app.config.get('baker/skip_patterns')
-        self._force_patterns = app.config.get('baker/force_patterns')
-        pipeline = ProcessorPipeline(
-                app, mounts, self._out_dir,
-                skip_patterns=self._skip_patterns,
-                force_patterns=self._force_patterns)
+        pipeline = ProcessorPipeline(app, self._out_dir)
         self._asset_record = pipeline.run()
         self._page_record = ServeRecord()
 
@@ -184,17 +176,12 @@
         # Yep, we know about this URL because we processed an asset that
         # maps to it... make sure it's up to date by re-processing it
         # before serving.
-        mounts = app.assets_dirs
         asset_in_path = entry.path
         asset_out_path = os.path.join(self._out_dir, rel_req_path)
 
         if self.synchronous_asset_pipeline:
             logger.debug("Making sure '%s' is up-to-date." % asset_in_path)
-            pipeline = ProcessorPipeline(
-                    app, mounts, self._out_dir,
-                    skip_patterns=self._skip_patterns,
-                    force_patterns=self._force_patterns,
-                    num_workers=1)
+            pipeline = ProcessorPipeline(app, self._out_dir)
             r = pipeline.run(asset_in_path, delete=False, save_record=False,
                              previous_record=self._asset_record)
             assert len(r.entries) == 1
@@ -204,11 +191,7 @@
 
     def _try_serve_new_asset(self, app, environ, request):
         logger.debug("Searching for a new asset with path: %s" % request.path)
-        mounts = app.assets_dirs
-        pipeline = ProcessorPipeline(
-                app, mounts, self._out_dir,
-                skip_patterns=self._skip_patterns,
-                force_patterns=self._force_patterns)
+        pipeline = ProcessorPipeline(app, self._out_dir)
         r = pipeline.run(new_only=True, delete=False, save_record=False,
                          previous_record=self._asset_record)
         for e in r.entries:
--- a/tests/test_processing_base.py	Sun Jan 18 11:53:18 2015 -0800
+++ b/tests/test_processing_base.py	Sun Jan 18 12:12:57 2015 -0800
@@ -34,11 +34,10 @@
         return True
 
 
-def _get_pipeline(fs, cache=True, **kwargs):
-    app = fs.getApp(cache=cache)
-    mounts = [os.path.join(app.root_dir, 'assets')]
-    return ProcessorPipeline(app, mounts, fs.path('counter'),
-            num_workers=1, **kwargs)
+def _get_pipeline(fs, app=None):
+    app = app or fs.getApp()
+        app.config.set('baker/workers', 1)
+    return ProcessorPipeline(app, fs.path('counter'))
 
 
 def test_empty():
@@ -172,15 +171,33 @@
             .withFile('kitchen/assets/_hidden.html', 'Shhh')
             .withFile('kitchen/assets/foo/_important.html', 'Important!'))
     with mock_fs_scope(fs):
-        pp = _get_pipeline(fs, skip_patterns=['/^_/'])
+        pp = _get_pipeline(fs)
+        pp.addSkipPatterns(['/^_/'])
         pp.filterProcessors(['copy'])
-        expected = {}
-        assert expected == fs.getStructure('counter')
+        assert {} == fs.getStructure('counter')
         pp.run()
         expected = {
                 'something.html': 'A test file.',
                 'foo': {
                     '_important.html': 'Important!'}
                 }
         assert expected == fs.getStructure('counter')
 
+
+@pytest.mark.parametrize('names, expected', [
+        ('all', ['copy', 'concat', 'less', 'sass', 'sitemap']),
+        ('all -sitemap', ['copy', 'concat', 'less', 'sass']),
+        ('-sitemap -less -sass all', ['copy', 'concat']),
+        ('copy', ['copy']),
+        ('less sass', ['less', 'sass'])
+    ])
+def test_filter_processor(names, expected):
+    fs = mock_fs()
+    with mock_fs_scope(fs):
+        app = fs.getApp()
+        pp = _get_pipeline(fs, app=app)
+        pp.filterProcessors('copy concat less sass sitemap')
+        procs = pp.getFilteredProcessors(names)
+        actual = [p.PROCESSOR_NAME for p in procs]
+        assert sorted(actual) == sorted(expected)
+