diff tests/test_processing_base.py @ 120:133845647083

Better error management and removal support in baking/processing.

* Baker and processor pipeline now store errors in their records.
* They also support deleting output files that are no longer valid.
* The basic transitional record class implements more boilerplate code.
* The processor pipeline is run from the `bake` command directly.
* New unit tests.
* Unit test mocking now mocks `os.remove` too.
author Ludovic Chabant <ludovic@chabant.com>
date Sun, 09 Nov 2014 14:46:23 -0800
parents 3471ffa059b2
children e725af1d48fb
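The description above notes that the pipeline can now delete output files that are no longer valid, which is what `test_removed` below exercises by deleting `blah2.foo` from the assets and re-running the pipeline (this is also why the unit test mocking now covers `os.remove`). As a rough illustration of the idea only (a minimal standalone sketch, not piecrust's actual implementation; `sync_outputs`, `previous_outputs`, and `current_outputs` are hypothetical names), stale outputs can be found by diffing the previous run's record against the current set of inputs:

    import os

    def sync_outputs(previous_outputs, current_outputs, out_dir):
        # Outputs recorded on the previous run whose source files have since
        # disappeared are no longer valid and can be deleted.
        for rel_path in previous_outputs - current_outputs:
            stale = os.path.join(out_dir, rel_path)
            if os.path.isfile(stale):
                os.remove(stale)

    # e.g. sync_outputs({'blah1.foo', 'blah2.foo'}, {'blah1.foo'}, 'counter')
    # would remove counter/blah2.foo, matching the expectation in test_removed.
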
--- a/tests/test_processing_base.py	Wed Oct 29 08:19:58 2014 -0700
+++ b/tests/test_processing_base.py	Sun Nov 09 14:46:23 2014 -0800
@@ -1,11 +1,41 @@
+import time
 import os.path
+import shutil
 import pytest
-from piecrust.processing.base import ProcessorPipeline
+from piecrust.processing.base import (ProcessorPipeline, SimpleFileProcessor)
+from piecrust.processing.records import ProcessorPipelineRecord
 from .mockutil import mock_fs, mock_fs_scope
 
 
-def _get_pipeline(fs, **kwargs):
-    app = fs.getApp(cache=False)
+class FooProcessor(SimpleFileProcessor):
+    def __init__(self, exts=None, open_func=None):
+        exts = exts or ('foo', 'foo')
+        super(FooProcessor, self).__init__({exts[0]: exts[1]})
+        self.PROCESSOR_NAME = exts[0]
+        self.open_func = open_func or open
+
+    def _doProcess(self, in_path, out_path):
+        with self.open_func(in_path, 'r') as f:
+            text = f.read()
+        with self.open_func(out_path, 'w') as f:
+            f.write("%s: %s" % (self.PROCESSOR_NAME.upper(), text))
+        return True
+
+
+class NoopProcessor(SimpleFileProcessor):
+    def __init__(self, exts):
+        super(NoopProcessor, self).__init__({exts[0]: exts[1]})
+        self.PROCESSOR_NAME = exts[0]
+        self.processed = []
+
+    def _doProcess(self, in_path, out_path):
+        self.processed.append(in_path)
+        shutil.copyfile(in_path, out_path)
+        return True
+
+
+def _get_pipeline(fs, cache=True, **kwargs):
+    app = fs.getApp(cache=cache)
     mounts = [os.path.join(app.root_dir, 'assets')]
     return ProcessorPipeline(app, mounts, fs.path('counter'),
             num_workers=1, **kwargs)
@@ -36,6 +66,97 @@
         assert expected == fs.getStructure('counter')
 
 
+def test_one_level_dirtyness():
+    fs = (mock_fs()
+            .withFile('kitchen/assets/blah.foo', 'A test file.'))
+    with mock_fs_scope(fs):
+        pp = _get_pipeline(fs)
+        pp.filterProcessors(['copy'])
+        pp.run()
+        expected = {'blah.foo': 'A test file.'}
+        assert expected == fs.getStructure('counter')
+        mtime = os.path.getmtime(fs.path('/counter/blah.foo'))
+        assert abs(time.time() - mtime) <= 2
+
+        pp.run()
+        assert expected == fs.getStructure('counter')
+        assert mtime == os.path.getmtime(fs.path('/counter/blah.foo'))
+
+        fs.withFile('kitchen/assets/blah.foo', 'A new test file.')
+        pp.run()
+        expected = {'blah.foo': 'A new test file.'}
+        assert expected == fs.getStructure('counter')
+        assert mtime < os.path.getmtime(fs.path('/counter/blah.foo'))
+
+
+def test_two_levels_dirtyness():
+    fs = (mock_fs()
+            .withFile('kitchen/assets/blah.foo', 'A test file.'))
+    with mock_fs_scope(fs) as scope:
+        pp = _get_pipeline(fs)
+        pp.processors.append(FooProcessor(('foo', 'bar'), scope._open))
+        pp.filterProcessors(['foo', 'copy'])
+        pp.run()
+        expected = {'blah.bar': 'FOO: A test file.'}
+        assert expected == fs.getStructure('counter')
+        mtime = os.path.getmtime(fs.path('/counter/blah.bar'))
+        assert abs(time.time() - mtime) <= 2
+
+        pp.run()
+        assert expected == fs.getStructure('counter')
+        assert mtime == os.path.getmtime(fs.path('/counter/blah.bar'))
+
+        fs.withFile('kitchen/assets/blah.foo', 'A new test file.')
+        pp.run()
+        expected = {'blah.bar': 'FOO: A new test file.'}
+        assert expected == fs.getStructure('counter')
+        assert mtime < os.path.getmtime(fs.path('/counter/blah.bar'))
+
+
+def test_removed():
+    fs = (mock_fs()
+            .withFile('kitchen/assets/blah1.foo', 'A test file.')
+            .withFile('kitchen/assets/blah2.foo', 'Ooops'))
+    with mock_fs_scope(fs):
+        expected = {
+                'blah1.foo': 'A test file.',
+                'blah2.foo': 'Ooops'}
+        assert expected == fs.getStructure('kitchen/assets')
+        pp = _get_pipeline(fs)
+        pp.filterProcessors(['copy'])
+        pp.run()
+        assert expected == fs.getStructure('counter')
+
+        os.remove(fs.path('/kitchen/assets/blah2.foo'))
+        expected = {
+                'blah1.foo': 'A test file.'}
+        assert expected == fs.getStructure('kitchen/assets')
+        pp.run()
+        assert expected == fs.getStructure('counter')
+
+
+def test_record_version_change():
+    fs = (mock_fs()
+            .withFile('kitchen/assets/blah.foo', 'A test file.'))
+    with mock_fs_scope(fs):
+        pp = _get_pipeline(fs)
+        noop = NoopProcessor(('foo', 'foo'))
+        pp.processors.append(noop)
+        pp.filterProcessors(['foo', 'copy'])
+        pp.run()
+        assert 1 == len(noop.processed)
+
+        pp.run()
+        assert 1 == len(noop.processed)
+
+        ProcessorPipelineRecord.RECORD_VERSION += 1
+        try:
+            pp.run()
+            assert 2 == len(noop.processed)
+        finally:
+            ProcessorPipelineRecord.RECORD_VERSION -= 1
+
+
 @pytest.mark.parametrize('patterns, expected', [
         (['_'],
             {'something.html': 'A test file.'}),