piecrust2: comparison of tests/test_processing_base.py @ 414:c4b3a7fd2f87
bake: Make pipeline processing multi-process.
Not many changes here, as it's pretty straightforward, but there is an API change for
processors so they know whether they're being initialized/disposed from the main
process or from one of the workers. This makes it possible to separate global work
that has side-effects (e.g. creating a directory) from purely in-memory setup.
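
A minimal sketch of the kind of process-aware hook the message describes, assuming a hypothetical context object with an `is_main_process` flag passed to the processor's start/end callbacks. The names `PipelineContext`, `CacheDirProcessor`, `worker_id`, and `is_main_process` are illustrative assumptions, not taken from this changeset:

```python
# Illustrative sketch only; not the actual piecrust API from this changeset.
import os


class PipelineContext:
    """Hypothetical context handed to processor start/end hooks."""

    def __init__(self, worker_id):
        self.worker_id = worker_id

    @property
    def is_main_process(self):
        # Convention assumed here: a negative worker id means the main process.
        return self.worker_id < 0


class CacheDirProcessor:
    PROCESSOR_NAME = 'cachedir'

    def onPipelineStart(self, ctx):
        if ctx.is_main_process:
            # Global, side-effectful setup: run exactly once, from the parent.
            os.makedirs('_cache/proc', exist_ok=True)
        else:
            # Per-worker setup: keep it in memory, no shared side-effects.
            self._seen = set()

    def onPipelineEnd(self, ctx):
        if ctx.is_main_process:
            # Global teardown or report aggregation could go here.
            pass
```
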
author | Ludovic Chabant <ludovic@chabant.com>
date | Sat, 20 Jun 2015 19:20:30 -0700
parents | e7b865f8f335
children | d90ccdf18156
413:eacf0a3afd0c | 414:c4b3a7fd2f87 |
---|---|
1 import time | 1 import time |
2 import os.path | 2 import os.path |
3 import shutil | 3 import shutil |
4 import pytest | 4 import pytest |
5 from piecrust.processing.base import (ProcessorPipeline, SimpleFileProcessor) | 5 from piecrust.processing.base import SimpleFileProcessor |
| 6 from piecrust.processing.pipeline import ProcessorPipeline |
6 from piecrust.processing.records import ProcessorPipelineRecord | 7 from piecrust.processing.records import ProcessorPipelineRecord |
| 8 from piecrust.processing.worker import get_filtered_processors |
7 from .mockutil import mock_fs, mock_fs_scope | 9 from .mockutil import mock_fs, mock_fs_scope |
8 | 10 |
9 | 11 |
10 class FooProcessor(SimpleFileProcessor): | 12 class FooProcessor(SimpleFileProcessor): |
11 def __init__(self, exts=None, open_func=None): | 13 def __init__(self, exts=None, open_func=None): |
34 return True | 36 return True |
35 | 37 |
36 | 38 |
37 def _get_pipeline(fs, app=None): | 39 def _get_pipeline(fs, app=None): |
38 app = app or fs.getApp() | 40 app = app or fs.getApp() |
39 app.config.set('baker/num_workers', 1) | |
40 return ProcessorPipeline(app, fs.path('counter')) | 41 return ProcessorPipeline(app, fs.path('counter')) |
41 | 42 |
42 | 43 |
43 def test_empty(): | 44 def test_empty(): |
44 fs = mock_fs() | 45 fs = mock_fs() |
45 with mock_fs_scope(fs): | 46 with mock_fs_scope(fs): |
46 pp = _get_pipeline(fs) | 47 pp = _get_pipeline(fs) |
47 pp.filterProcessors(['copy']) | 48 pp.enabled_processors = ['copy'] |
48 expected = {} | 49 expected = {} |
49 assert expected == fs.getStructure('counter') | 50 assert expected == fs.getStructure('counter') |
50 pp.run() | 51 pp.run() |
51 expected = {} | 52 expected = {} |
52 assert expected == fs.getStructure('counter') | 53 assert expected == fs.getStructure('counter') |
55 def test_one_file(): | 56 def test_one_file(): |
56 fs = (mock_fs() | 57 fs = (mock_fs() |
57 .withFile('kitchen/assets/something.html', 'A test file.')) | 58 .withFile('kitchen/assets/something.html', 'A test file.')) |
58 with mock_fs_scope(fs): | 59 with mock_fs_scope(fs): |
59 pp = _get_pipeline(fs) | 60 pp = _get_pipeline(fs) |
60 pp.filterProcessors(['copy']) | 61 pp.enabled_processors = ['copy'] |
61 expected = {} | 62 expected = {} |
62 assert expected == fs.getStructure('counter') | 63 assert expected == fs.getStructure('counter') |
63 pp.run() | 64 pp.run() |
64 expected = {'something.html': 'A test file.'} | 65 expected = {'something.html': 'A test file.'} |
65 assert expected == fs.getStructure('counter') | 66 assert expected == fs.getStructure('counter') |
68 def test_one_level_dirtyness(): | 69 def test_one_level_dirtyness(): |
69 fs = (mock_fs() | 70 fs = (mock_fs() |
70 .withFile('kitchen/assets/blah.foo', 'A test file.')) | 71 .withFile('kitchen/assets/blah.foo', 'A test file.')) |
71 with mock_fs_scope(fs): | 72 with mock_fs_scope(fs): |
72 pp = _get_pipeline(fs) | 73 pp = _get_pipeline(fs) |
73 pp.filterProcessors(['copy']) | 74 pp.enabled_processors = ['copy'] |
74 pp.run() | 75 pp.run() |
75 expected = {'blah.foo': 'A test file.'} | 76 expected = {'blah.foo': 'A test file.'} |
76 assert expected == fs.getStructure('counter') | 77 assert expected == fs.getStructure('counter') |
77 mtime = os.path.getmtime(fs.path('/counter/blah.foo')) | 78 mtime = os.path.getmtime(fs.path('/counter/blah.foo')) |
78 assert abs(time.time() - mtime) <= 2 | 79 assert abs(time.time() - mtime) <= 2 |
91 | 92 |
92 | 93 |
93 def test_two_levels_dirtyness(): | 94 def test_two_levels_dirtyness(): |
94 fs = (mock_fs() | 95 fs = (mock_fs() |
95 .withFile('kitchen/assets/blah.foo', 'A test file.')) | 96 .withFile('kitchen/assets/blah.foo', 'A test file.')) |
96 with mock_fs_scope(fs) as scope: | 97 with mock_fs_scope(fs): |
97 pp = _get_pipeline(fs) | 98 pp = _get_pipeline(fs) |
98 pp.processors.append(FooProcessor(('foo', 'bar'), scope._open)) | 99 pp.enabled_processors = ['copy'] |
99 pp.filterProcessors(['foo', 'copy']) | 100 pp.additional_processors = [FooProcessor(('foo', 'bar'))] |
100 pp.run() | 101 pp.run() |
101 expected = {'blah.bar': 'FOO: A test file.'} | 102 expected = {'blah.bar': 'FOO: A test file.'} |
102 assert expected == fs.getStructure('counter') | 103 assert expected == fs.getStructure('counter') |
103 mtime = os.path.getmtime(fs.path('/counter/blah.bar')) | 104 mtime = os.path.getmtime(fs.path('/counter/blah.bar')) |
104 assert abs(time.time() - mtime) <= 2 | 105 assert abs(time.time() - mtime) <= 2 |
124 expected = { | 125 expected = { |
125 'blah1.foo': 'A test file.', | 126 'blah1.foo': 'A test file.', |
126 'blah2.foo': 'Ooops'} | 127 'blah2.foo': 'Ooops'} |
127 assert expected == fs.getStructure('kitchen/assets') | 128 assert expected == fs.getStructure('kitchen/assets') |
128 pp = _get_pipeline(fs) | 129 pp = _get_pipeline(fs) |
129 pp.filterProcessors(['copy']) | 130 pp.enabled_processors = ['copy'] |
130 pp.run() | 131 pp.run() |
131 assert expected == fs.getStructure('counter') | 132 assert expected == fs.getStructure('counter') |
132 | 133 |
133 time.sleep(1) | 134 time.sleep(1) |
134 os.remove(fs.path('/kitchen/assets/blah2.foo')) | 135 os.remove(fs.path('/kitchen/assets/blah2.foo')) |
143 fs = (mock_fs() | 144 fs = (mock_fs() |
144 .withFile('kitchen/assets/blah.foo', 'A test file.')) | 145 .withFile('kitchen/assets/blah.foo', 'A test file.')) |
145 with mock_fs_scope(fs): | 146 with mock_fs_scope(fs): |
146 pp = _get_pipeline(fs) | 147 pp = _get_pipeline(fs) |
147 noop = NoopProcessor(('foo', 'foo')) | 148 noop = NoopProcessor(('foo', 'foo')) |
148 pp.processors.append(noop) | 149 pp.enabled_processors = ['copy'] |
149 pp.filterProcessors(['foo', 'copy']) | 150 pp.additional_processors = [noop] |
150 pp.run() | 151 pp.run() |
151 assert 1 == len(noop.processed) | 152 assert os.path.exists(fs.path('/counter/blah.foo')) is True |
152 | 153 mtime = os.path.getmtime(fs.path('/counter/blah.foo')) |
153 pp.run() | 154 |
154 assert 1 == len(noop.processed) | 155 time.sleep(1) |
155 | 156 pp.run() |
| 157 assert mtime == os.path.getmtime(fs.path('/counter/blah.foo')) |
| 158 |
| 159 time.sleep(1) |
156 ProcessorPipelineRecord.RECORD_VERSION += 1 | 160 ProcessorPipelineRecord.RECORD_VERSION += 1 |
157 try: | 161 try: |
158 pp.run() | 162 pp.run() |
159 assert 2 == len(noop.processed) | 163 assert mtime < os.path.getmtime(fs.path('/counter/blah.foo')) |
160 finally: | 164 finally: |
161 ProcessorPipelineRecord.RECORD_VERSION -= 1 | 165 ProcessorPipelineRecord.RECORD_VERSION -= 1 |
162 | 166 |
163 | 167 |
164 @pytest.mark.parametrize('patterns, expected', [ | 168 @pytest.mark.parametrize('patterns, expected', [ |
168 {}), | 172 {}), |
169 (['/^_/'], | 173 (['/^_/'], |
170 {'something.html': 'A test file.', | 174 {'something.html': 'A test file.', |
171 'foo': {'_important.html': 'Important!'}}) | 175 'foo': {'_important.html': 'Important!'}}) |
172 ]) | 176 ]) |
173 def test_skip_pattern(patterns, expected): | 177 def test_ignore_pattern(patterns, expected): |
174 fs = (mock_fs() | 178 fs = (mock_fs() |
175 .withFile('kitchen/assets/something.html', 'A test file.') | 179 .withFile('kitchen/assets/something.html', 'A test file.') |
176 .withFile('kitchen/assets/_hidden.html', 'Shhh') | 180 .withFile('kitchen/assets/_hidden.html', 'Shhh') |
177 .withFile('kitchen/assets/foo/_important.html', 'Important!')) | 181 .withFile('kitchen/assets/foo/_important.html', 'Important!')) |
178 with mock_fs_scope(fs): | 182 with mock_fs_scope(fs): |
179 pp = _get_pipeline(fs) | 183 pp = _get_pipeline(fs) |
180 pp.addSkipPatterns(patterns) | 184 pp.addIgnorePatterns(patterns) |
181 pp.filterProcessors(['copy']) | 185 pp.enabled_processors = ['copy'] |
182 assert {} == fs.getStructure('counter') | 186 assert {} == fs.getStructure('counter') |
183 pp.run() | 187 pp.run() |
184 assert expected == fs.getStructure('counter') | 188 assert expected == fs.getStructure('counter') |
185 | 189 |
186 | 190 |
187 @pytest.mark.parametrize('names, expected', [ | 191 @pytest.mark.parametrize('names, expected', [ |
188 ('all', ['copy', 'concat', 'less', 'sass', 'sitemap']), | 192 ('all', ['cleancss', 'compass', 'copy', 'concat', 'less', 'requirejs', |
189 ('all -sitemap', ['copy', 'concat', 'less', 'sass']), | 193 'sass', 'sitemap', 'uglifyjs']), |
190 ('-sitemap -less -sass all', ['copy', 'concat']), | 194 ('all -sitemap', ['cleancss', 'copy', 'compass', 'concat', 'less', |
| 195 'requirejs', 'sass', 'uglifyjs']), |
| 196 ('-sitemap -less -sass all', ['cleancss', 'copy', 'compass', 'concat', |
| 197 'requirejs', 'uglifyjs']), |
191 ('copy', ['copy']), | 198 ('copy', ['copy']), |
192 ('less sass', ['less', 'sass']) | 199 ('less sass', ['less', 'sass']) |
193 ]) | 200 ]) |
194 def test_filter_processor(names, expected): | 201 def test_filter_processor(names, expected): |
195 fs = mock_fs() | 202 fs = mock_fs() |
196 with mock_fs_scope(fs): | 203 with mock_fs_scope(fs): |
197 app = fs.getApp() | 204 app = fs.getApp() |
198 pp = _get_pipeline(fs, app=app) | 205 processors = app.plugin_loader.getProcessors() |
199 pp.filterProcessors('copy concat less sass sitemap') | 206 procs = get_filtered_processors(processors, names) |
200 procs = pp.getFilteredProcessors(names) | |
201 actual = [p.PROCESSOR_NAME for p in procs] | 207 actual = [p.PROCESSOR_NAME for p in procs] |
202 assert sorted(actual) == sorted(expected) | 208 assert sorted(actual) == sorted(expected) |
203 | 209 |
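
With processing split across worker processes, the in-memory `processed` list on the `NoopProcessor` instance held by the test is no longer a reliable observation point, which is presumably why those assertions now compare output mtimes instead. Condensed from the updated tests above, the new test-side configuration pattern looks roughly like this (a sketch reusing this file's own fixtures, not a standalone test):

```python
from piecrust.processing.pipeline import ProcessorPipeline
from piecrust.processing.worker import get_filtered_processors
from .mockutil import mock_fs, mock_fs_scope  # test helpers from this package


def example():
    fs = (mock_fs()
          .withFile('kitchen/assets/something.html', 'A test file.'))
    with mock_fs_scope(fs):
        app = fs.getApp()
        pp = ProcessorPipeline(app, fs.path('counter'))
        # Selection is now declarative; extra processor instances go in
        # `additional_processors` instead of being appended to pp.processors.
        pp.enabled_processors = ['copy']
        pp.run()
        assert fs.getStructure('counter') == {'something.html': 'A test file.'}

        # Name-based filtering is now a standalone helper used by the workers.
        procs = get_filtered_processors(app.plugin_loader.getProcessors(),
                                        'copy concat less sass sitemap')
        return [p.PROCESSOR_NAME for p in procs]
```
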