diff tests/test_processing_base.py @ 414:c4b3a7fd2f87

bake: Make pipeline processing multi-process. Not many changes are needed here, as the work is fairly straightforward, but there is an API change for processors so they know whether they are being initialized/disposed from the main process or from one of the worker processes. This makes it possible to separate global work that has side effects (e.g. creating a directory) from purely in-memory setup.
author Ludovic Chabant <ludovic@chabant.com>
date Sat, 20 Jun 2015 19:20:30 -0700
parents e7b865f8f335
children d90ccdf18156
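
The commit message above mentions a processor-side API change so that processors can tell whether they are being initialized from the main process or from a worker. Below is a minimal, hypothetical sketch of what that could look like; only the main-process/worker distinction comes from the commit message, while the hook name and the `ctx.is_main_process` / `ctx.out_dir` attributes are assumptions made purely for illustration.

    import os

    from piecrust.processing.base import SimpleFileProcessor


    class ThumbnailProcessor(SimpleFileProcessor):
        # Illustrative processor; the name and behavior are not part of this changeset.
        PROCESSOR_NAME = 'thumbs'

        def onPipelineStart(self, ctx):
            if ctx.is_main_process:
                # Side-effecting, global setup: run it once, from the main process only.
                os.makedirs(os.path.join(ctx.out_dir, 'thumbs'), exist_ok=True)
            # In-memory setup is cheap and safe to repeat in every worker process.
            self._seen = set()
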
--- a/tests/test_processing_base.py	Sat Jun 20 19:16:38 2015 -0700
+++ b/tests/test_processing_base.py	Sat Jun 20 19:20:30 2015 -0700
@@ -2,8 +2,10 @@
 import os.path
 import shutil
 import pytest
-from piecrust.processing.base import (ProcessorPipeline, SimpleFileProcessor)
+from piecrust.processing.base import SimpleFileProcessor
+from piecrust.processing.pipeline import ProcessorPipeline
 from piecrust.processing.records import ProcessorPipelineRecord
+from piecrust.processing.worker import get_filtered_processors
 from .mockutil import mock_fs, mock_fs_scope
 
 
@@ -36,7 +38,6 @@
 
 def _get_pipeline(fs, app=None):
     app = app or fs.getApp()
-    app.config.set('baker/num_workers', 1)
     return ProcessorPipeline(app, fs.path('counter'))
 
 
@@ -44,7 +45,7 @@
     fs = mock_fs()
     with mock_fs_scope(fs):
         pp = _get_pipeline(fs)
-        pp.filterProcessors(['copy'])
+        pp.enabled_processors = ['copy']
         expected = {}
         assert expected == fs.getStructure('counter')
         pp.run()
@@ -57,7 +58,7 @@
             .withFile('kitchen/assets/something.html', 'A test file.'))
     with mock_fs_scope(fs):
         pp = _get_pipeline(fs)
-        pp.filterProcessors(['copy'])
+        pp.enabled_processors = ['copy']
         expected = {}
         assert expected == fs.getStructure('counter')
         pp.run()
@@ -70,7 +71,7 @@
             .withFile('kitchen/assets/blah.foo', 'A test file.'))
     with mock_fs_scope(fs):
         pp = _get_pipeline(fs)
-        pp.filterProcessors(['copy'])
+        pp.enabled_processors = ['copy']
         pp.run()
         expected = {'blah.foo': 'A test file.'}
         assert expected == fs.getStructure('counter')
@@ -93,10 +94,10 @@
 def test_two_levels_dirtyness():
     fs = (mock_fs()
             .withFile('kitchen/assets/blah.foo', 'A test file.'))
-    with mock_fs_scope(fs) as scope:
+    with mock_fs_scope(fs):
         pp = _get_pipeline(fs)
-        pp.processors.append(FooProcessor(('foo', 'bar'), scope._open))
-        pp.filterProcessors(['foo', 'copy'])
+        pp.enabled_processors = ['copy']
+        pp.additional_processors = [FooProcessor(('foo', 'bar'))]
         pp.run()
         expected = {'blah.bar': 'FOO: A test file.'}
         assert expected == fs.getStructure('counter')
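
For reference, the hunks above replace the old filterProcessors() call with two pipeline attributes. A short usage sketch mirroring the tests (app, fs, and FooProcessor are the helpers used in this test module; the output path is illustrative):

    from piecrust.processing.pipeline import ProcessorPipeline

    pp = ProcessorPipeline(app, fs.path('counter'))
    # Built-in processors are now selected by name...
    pp.enabled_processors = ['copy']
    # ...while ad-hoc processor instances are passed directly, without
    # having to be named in a filter string.
    pp.additional_processors = [FooProcessor(('foo', 'bar'))]
    pp.run()
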
@@ -126,7 +127,7 @@
                 'blah2.foo': 'Ooops'}
         assert expected == fs.getStructure('kitchen/assets')
         pp = _get_pipeline(fs)
-        pp.filterProcessors(['copy'])
+        pp.enabled_processors = ['copy']
         pp.run()
         assert expected == fs.getStructure('counter')
 
@@ -145,18 +146,21 @@
     with mock_fs_scope(fs):
         pp = _get_pipeline(fs)
         noop = NoopProcessor(('foo', 'foo'))
-        pp.processors.append(noop)
-        pp.filterProcessors(['foo', 'copy'])
+        pp.enabled_processors = ['copy']
+        pp.additional_processors = [noop]
         pp.run()
-        assert 1 == len(noop.processed)
+        assert os.path.exists(fs.path('/counter/blah.foo')) is True
+        mtime = os.path.getmtime(fs.path('/counter/blah.foo'))
 
+        time.sleep(1)
         pp.run()
-        assert 1 == len(noop.processed)
+        assert mtime == os.path.getmtime(fs.path('/counter/blah.foo'))
 
+        time.sleep(1)
         ProcessorPipelineRecord.RECORD_VERSION += 1
         try:
             pp.run()
-            assert 2 == len(noop.processed)
+            assert mtime < os.path.getmtime(fs.path('/counter/blah.foo'))
         finally:
             ProcessorPipelineRecord.RECORD_VERSION -= 1
 
@@ -170,24 +174,27 @@
             {'something.html': 'A test file.',
                 'foo': {'_important.html': 'Important!'}})
         ])
-def test_skip_pattern(patterns, expected):
+def test_ignore_pattern(patterns, expected):
     fs = (mock_fs()
             .withFile('kitchen/assets/something.html', 'A test file.')
             .withFile('kitchen/assets/_hidden.html', 'Shhh')
             .withFile('kitchen/assets/foo/_important.html', 'Important!'))
     with mock_fs_scope(fs):
         pp = _get_pipeline(fs)
-        pp.addSkipPatterns(patterns)
-        pp.filterProcessors(['copy'])
+        pp.addIgnorePatterns(patterns)
+        pp.enabled_processors = ['copy']
         assert {} == fs.getStructure('counter')
         pp.run()
         assert expected == fs.getStructure('counter')
 
 
 @pytest.mark.parametrize('names, expected', [
-        ('all', ['copy', 'concat', 'less', 'sass', 'sitemap']),
-        ('all -sitemap', ['copy', 'concat', 'less', 'sass']),
-        ('-sitemap -less -sass all', ['copy', 'concat']),
+        ('all', ['cleancss', 'compass', 'copy', 'concat', 'less', 'requirejs',
+                 'sass', 'sitemap', 'uglifyjs']),
+        ('all -sitemap', ['cleancss', 'copy', 'compass', 'concat', 'less',
+                          'requirejs', 'sass', 'uglifyjs']),
+        ('-sitemap -less -sass all', ['cleancss', 'copy', 'compass', 'concat',
+                                      'requirejs', 'uglifyjs']),
         ('copy', ['copy']),
         ('less sass', ['less', 'sass'])
     ])
@@ -195,9 +202,8 @@
     fs = mock_fs()
     with mock_fs_scope(fs):
         app = fs.getApp()
-        pp = _get_pipeline(fs, app=app)
-        pp.filterProcessors('copy concat less sass sitemap')
-        procs = pp.getFilteredProcessors(names)
+        processors = app.plugin_loader.getProcessors()
+        procs = get_filtered_processors(processors, names)
         actual = [p.PROCESSOR_NAME for p in procs]
         assert sorted(actual) == sorted(expected)
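
The last hunk moves name filtering out of the pipeline and into the standalone get_filtered_processors() helper. A usage sketch based on the test above (the app object is assumed to be a regular PieCrust application instance):

    from piecrust.processing.worker import get_filtered_processors

    processors = app.plugin_loader.getProcessors()
    # 'all' selects every registered processor; a '-' prefix removes one from the set.
    procs = get_filtered_processors(processors, 'all -sitemap -less')
    print(sorted(p.PROCESSOR_NAME for p in procs))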