comparison piecrust/commands/builtin/baking.py @ 415:0e9a94b7fdfa
bake: Improve bake record information.
* Store information in the bake record in a form that requires less
  interaction between the master process and the workers. For instance,
  don't store the paginator object in the render pass info -- instead, just
  store whether pagination was used, and whether it had more items (see the
  sketch just below).
* Simplify information passing between workers and bake passes by saving the
  rendering info to the JSON cache. This means the "render first sub" job
  doesn't have to return anything except errors now (a sketch of this idea
  follows the diff).
* Add more performance counter info.
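
As context for the first bullet, here is a minimal sketch of the idea: keep only cheap, serializable facts about pagination in the per-pass render info, rather than a reference to the paginator object itself. Only the attribute names (`used_pagination`, `pagination_has_more`, `used_source_names`, `used_taxonomy_terms`, `used_assets`) appear in the diff below; the `RenderPassInfo` class shape, the `setPagination` helper, and the paginator's `has_more` property are assumptions for illustration.

```python
class RenderPassInfo:
    """Per-pass render info that is cheap to send between worker
    and master processes (a sketch, not the real class)."""
    def __init__(self):
        self.used_source_names = set()
        self.used_taxonomy_terms = set()
        self.used_assets = False
        # Instead of keeping the paginator object around, record only
        # the two facts the bake record actually needs.
        self.used_pagination = False
        self.pagination_has_more = False

    def setPagination(self, paginator):
        # Hypothetical hook, called when a page renders with pagination.
        self.used_pagination = True
        self.pagination_has_more = paginator.has_more
```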
author | Ludovic Chabant <ludovic@chabant.com> |
---|---|
date | Sat, 20 Jun 2015 19:23:16 -0700 |
parents | e7b865f8f335 |
children | 4a43d7015b75 |
414:c4b3a7fd2f87 | 415:0e9a94b7fdfa |
---|---|
7 from piecrust.baking.baker import Baker | 7 from piecrust.baking.baker import Baker |
8 from piecrust.baking.records import ( | 8 from piecrust.baking.records import ( |
9 BakeRecord, BakeRecordEntry, SubPageBakeInfo) | 9 BakeRecord, BakeRecordEntry, SubPageBakeInfo) |
10 from piecrust.chefutil import format_timed | 10 from piecrust.chefutil import format_timed |
11 from piecrust.commands.base import ChefCommand | 11 from piecrust.commands.base import ChefCommand |
12 from piecrust.processing.base import ProcessorPipeline | 12 from piecrust.processing.pipeline import ProcessorPipeline |
13 from piecrust.processing.records import ( | 13 from piecrust.processing.records import ( |
14 ProcessorPipelineRecord, | 14 ProcessorPipelineRecord, |
15 FLAG_PREPARED, FLAG_PROCESSED, FLAG_OVERRIDEN, | 15 FLAG_PREPARED, FLAG_PROCESSED, |
16 FLAG_BYPASSED_STRUCTURED_PROCESSING) | 16 FLAG_BYPASSED_STRUCTURED_PROCESSING) |
17 from piecrust.rendering import PASS_FORMATTING, PASS_RENDERING | 17 from piecrust.rendering import ( |
18 PASS_FORMATTING, PASS_RENDERING) | |
18 | 19 |
19 | 20 |
20 logger = logging.getLogger(__name__) | 21 logger = logging.getLogger(__name__) |
21 | 22 |
22 | 23 |
43 '--assets-only', | 44 '--assets-only', |
44 help="Only bake the assets (don't bake the web pages).", | 45 help="Only bake the assets (don't bake the web pages).", |
45 action='store_true') | 46 action='store_true') |
46 parser.add_argument( | 47 parser.add_argument( |
47 '--html-only', | 48 '--html-only', |
48 help="Only bake HTML files (don't run the asset pipeline).", | 49 help="Only bake the pages (don't run the asset pipeline).", |
49 action='store_true') | 50 action='store_true') |
50 parser.add_argument( | 51 parser.add_argument( |
51 '--show-timers', | 52 '--show-timers', |
52 help="Show detailed timing information.", | 53 help="Show detailed timing information.", |
53 action='store_true') | 54 action='store_true') |
55 def run(self, ctx): | 56 def run(self, ctx): |
56 out_dir = (ctx.args.output or | 57 out_dir = (ctx.args.output or |
57 os.path.join(ctx.app.root_dir, '_counter')) | 58 os.path.join(ctx.app.root_dir, '_counter')) |
58 | 59 |
59 success = True | 60 success = True |
61 ctx.timers = {} | |
60 start_time = time.perf_counter() | 62 start_time = time.perf_counter() |
61 try: | 63 try: |
62 # Bake the site sources. | 64 # Bake the site sources. |
63 if not ctx.args.assets_only: | 65 if not ctx.args.assets_only: |
64 success = success & self._bakeSources(ctx, out_dir) | 66 success = success & self._bakeSources(ctx, out_dir) |
65 | 67 |
66 # Bake the assets. | 68 # Bake the assets. |
67 if not ctx.args.html_only: | 69 if not ctx.args.html_only: |
68 success = success & self._bakeAssets(ctx, out_dir) | 70 success = success & self._bakeAssets(ctx, out_dir) |
71 | |
72 # Show merged timers. | |
73 if ctx.args.show_timers: | |
74 from colorama import Fore | |
75 logger.info("-------------------") | |
76 logger.info("Timing information:") | |
77 for name in sorted(ctx.timers.keys()): | |
78 val_str = '%8.1f s' % ctx.timers[name] | |
79 logger.info( | |
80 "[%s%s%s] %s" % | |
81 (Fore.GREEN, val_str, Fore.RESET, name)) | |
69 | 82 |
70 # All done. | 83 # All done. |
71 logger.info('-------------------------') | 84 logger.info('-------------------------') |
72 logger.info(format_timed(start_time, 'done baking')) | 85 logger.info(format_timed(start_time, 'done baking')) |
73 return 0 if success else 1 | 86 return 0 if success else 1 |
83 ctx.app.config.set('baker/workers', ctx.args.workers) | 96 ctx.app.config.set('baker/workers', ctx.args.workers) |
84 baker = Baker( | 97 baker = Baker( |
85 ctx.app, out_dir, | 98 ctx.app, out_dir, |
86 force=ctx.args.force) | 99 force=ctx.args.force) |
87 record = baker.bake() | 100 record = baker.bake() |
88 | 101 _merge_timers(record.timers, ctx.timers) |
89 if ctx.args.show_timers: | |
90 if record.timers: | |
91 from colorama import Fore | |
92 logger.info("-------------------") | |
93 logger.info("Timing information:") | |
94 for name in sorted(record.timers.keys()): | |
95 val_str = '%8.1f s' % record.timers[name] | |
96 logger.info( | |
97 "[%s%s%s] %s" % | |
98 (Fore.GREEN, val_str, Fore.RESET, name)) | |
99 else: | |
100 logger.warning("Timing information is not available.") | |
101 | |
102 return record.success | 102 return record.success |
103 | 103 |
104 def _bakeAssets(self, ctx, out_dir): | 104 def _bakeAssets(self, ctx, out_dir): |
105 proc = ProcessorPipeline( | 105 proc = ProcessorPipeline( |
106 ctx.app, out_dir, | 106 ctx.app, out_dir, |
107 force=ctx.args.force) | 107 force=ctx.args.force) |
108 record = proc.run() | 108 record = proc.run() |
109 _merge_timers(record.timers, ctx.timers) | |
109 return record.success | 110 return record.success |
111 | |
112 | |
113 def _merge_timers(source, target): | |
114 if source is None: | |
115 return | |
116 | |
117 for name, val in source.items(): | |
118 if name not in target: | |
119 target[name] = 0 | |
120 target[name] += val | |
110 | 121 |
111 | 122 |
112 class ShowRecordCommand(ChefCommand): | 123 class ShowRecordCommand(ChefCommand): |
113 def __init__(self): | 124 def __init__(self): |
114 super(ShowRecordCommand, self).__init__() | 125 super(ShowRecordCommand, self).__init__() |
133 parser.add_argument( | 144 parser.add_argument( |
134 '--last', | 145 '--last', |
135 type=int, | 146 type=int, |
136 default=0, | 147 default=0, |
137 help="Show the last Nth bake record.") | 148 help="Show the last Nth bake record.") |
149 parser.add_argument( | |
150 '--html-only', | |
151 action='store_true', | |
152 help="Only show records for pages (not from the asset " | |
153 "pipeline).") | |
154 parser.add_argument( | |
155 '--assets-only', | |
156 action='store_true', | |
157 help="Only show records for assets (not from pages).") | |
138 | 158 |
139 def run(self, ctx): | 159 def run(self, ctx): |
140 out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter') | 160 out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter') |
141 record_id = hashlib.md5(out_dir.encode('utf8')).hexdigest() | 161 record_id = hashlib.md5(out_dir.encode('utf8')).hexdigest() |
142 suffix = '' if ctx.args.last == 0 else '.%d' % ctx.args.last | 162 suffix = '' if ctx.args.last == 0 else '.%d' % ctx.args.last |
148 | 168 |
149 out_pattern = None | 169 out_pattern = None |
150 if ctx.args.out: | 170 if ctx.args.out: |
151 out_pattern = '*%s*' % ctx.args.out.strip('*') | 171 out_pattern = '*%s*' % ctx.args.out.strip('*') |
152 | 172 |
173 if not ctx.args.assets_only: | |
174 self._showBakeRecord(ctx, record_name, pattern, out_pattern) | |
175 if not ctx.args.html_only: | |
176 self._showProcessingRecord(ctx, record_name, pattern, out_pattern) | |
177 | |
178 def _showBakeRecord(self, ctx, record_name, pattern, out_pattern): | |
179 # Show the bake record. | |
153 record_cache = ctx.app.cache.getCache('baker') | 180 record_cache = ctx.app.cache.getCache('baker') |
154 if not record_cache.has(record_name): | 181 if not record_cache.has(record_name): |
155 raise Exception("No record has been created for this output path. " | 182 raise Exception("No record has been created for this output path. " |
156 "Did you bake there yet?") | 183 "Did you bake there yet?") |
157 | 184 |
158 # Show the bake record. | |
159 record = BakeRecord.load(record_cache.getCachePath(record_name)) | 185 record = BakeRecord.load(record_cache.getCachePath(record_name)) |
160 logging.info("Bake record for: %s" % record.out_dir) | 186 logging.info("Bake record for: %s" % record.out_dir) |
161 logging.info("From: %s" % record_name) | 187 logging.info("From: %s" % record_name) |
162 logging.info("Last baked: %s" % | 188 logging.info("Last baked: %s" % |
163 datetime.datetime.fromtimestamp(record.bake_time)) | 189 datetime.datetime.fromtimestamp(record.bake_time)) |
165 logging.info("Status: success") | 191 logging.info("Status: success") |
166 else: | 192 else: |
167 logging.error("Status: failed") | 193 logging.error("Status: failed") |
168 logging.info("Entries:") | 194 logging.info("Entries:") |
169 for entry in record.entries: | 195 for entry in record.entries: |
170 if pattern and not fnmatch.fnmatch(entry.rel_path, pattern): | 196 if pattern and not fnmatch.fnmatch(entry.path, pattern): |
171 continue | 197 continue |
172 if out_pattern and not ( | 198 if out_pattern and not ( |
173 any([o for o in entry.out_paths | 199 any([o for o in entry.out_paths |
174 if fnmatch.fnmatch(o, out_pattern)])): | 200 if fnmatch.fnmatch(o, out_pattern)])): |
175 continue | 201 continue |
176 | 202 |
177 flags = [] | 203 flags = _get_flag_descriptions( |
178 if entry.flags & BakeRecordEntry.FLAG_OVERRIDEN: | 204 entry.flags, |
179 flags.append('overriden') | 205 { |
180 | 206 BakeRecordEntry.FLAG_NEW: 'new', |
181 passes = {PASS_RENDERING: 'render', PASS_FORMATTING: 'format'} | 207 BakeRecordEntry.FLAG_SOURCE_MODIFIED: 'modified', |
208 BakeRecordEntry.FLAG_OVERRIDEN: 'overriden'}) | |
182 | 209 |
183 logging.info(" - ") | 210 logging.info(" - ") |
184 logging.info(" path: %s" % entry.rel_path) | 211 |
185 logging.info(" spec: %s:%s" % (entry.source_name, | 212 rel_path = os.path.relpath(entry.path, ctx.app.root_dir) |
186 entry.rel_path)) | 213 logging.info(" path: %s" % rel_path) |
214 logging.info(" source: %s" % entry.source_name) | |
187 if entry.taxonomy_info: | 215 if entry.taxonomy_info: |
188 tax_name, term, source_name = entry.taxonomy_info | 216 ti = entry.taxonomy_info |
189 logging.info(" taxonomy: %s (%s:%s)" % | 217 logging.info(" taxonomy: %s = %s (in %s)" % |
190 (term, source_name, tax_name)) | 218 (ti.taxonomy_name, ti.term, ti.source_name)) |
191 else: | 219 else: |
192 logging.info(" taxonomy: <none>") | 220 logging.info(" taxonomy: <none>") |
193 logging.info(" flags: %s" % ', '.join(flags)) | 221 logging.info(" flags: %s" % _join(flags)) |
194 logging.info(" config: %s" % entry.config) | 222 logging.info(" config: %s" % entry.config) |
223 | |
224 if entry.errors: | |
225 logging.error(" errors: %s" % entry.errors) | |
195 | 226 |
196 logging.info(" %d sub-pages:" % len(entry.subs)) | 227 logging.info(" %d sub-pages:" % len(entry.subs)) |
197 for sub in entry.subs: | 228 for sub in entry.subs: |
229 sub_flags = _get_flag_descriptions( | |
230 sub.flags, | |
231 { | |
232 SubPageBakeInfo.FLAG_BAKED: 'baked', | |
233 SubPageBakeInfo.FLAG_FORCED_BY_SOURCE: | |
234 'forced by source', | |
235 SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS: | |
236 'forced by missing previous record entry', | |
237 SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS: | |
238 'forced by previous errors', | |
239 SubPageBakeInfo.FLAG_FORMATTING_INVALIDATED: | |
240 'formatting invalidated'}) | |
241 | |
198 logging.info(" - ") | 242 logging.info(" - ") |
199 logging.info(" URL: %s" % sub.out_uri) | 243 logging.info(" URL: %s" % sub.out_uri) |
200 logging.info(" path: %s" % os.path.relpath(sub.out_path, | 244 logging.info(" path: %s" % os.path.relpath( |
201 out_dir)) | 245 sub.out_path, record.out_dir)) |
202 logging.info(" baked?: %s" % sub.was_baked) | 246 logging.info(" flags: %s" % _join(sub_flags)) |
203 | 247 |
204 sub_flags = [] | 248 if sub.render_info: |
205 if sub.flags & SubPageBakeInfo.FLAG_FORCED_BY_SOURCE: | 249 pass_names = { |
206 sub_flags.append('forced by source') | 250 PASS_FORMATTING: 'formatting pass', |
207 if sub.flags & SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS: | 251 PASS_RENDERING: 'rendering pass'} |
208 sub_flags.append('forced by missing previous record entry') | 252 for p, ri in sub.render_info.items(): |
209 if sub.flags & SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS: | 253 logging.info(" - %s" % pass_names[p]) |
210 sub_flags.append('forced by previous errors') | 254 logging.info(" used sources: %s" % |
211 logging.info(" flags: %s" % ', '.join(sub_flags)) | 255 _join(ri.used_source_names)) |
212 | 256 pgn_info = 'no' |
213 for p, pi in sub.render_passes.items(): | 257 if ri.used_pagination: |
214 logging.info(" %s pass:" % passes[p]) | 258 pgn_info = 'yes' |
215 logging.info(" used srcs: %s" % | 259 if ri.pagination_has_more: |
216 ', '.join(pi.used_source_names)) | 260 pgn_info += ', has more' |
217 logging.info(" used terms: %s" % | 261 logging.info(" used pagination: %s", pgn_info) |
218 ', '.join( | 262 logging.info(" used assets: %s", |
219 ['%s (%s:%s)' % (t, sn, tn) | 263 'yes' if ri.used_assets else 'no') |
220 for sn, tn, t in | 264 logging.info(" used terms: %s" % |
221 pi.used_taxonomy_terms])) | 265 _join( |
266 ['%s=%s (%s)' % (tn, t, sn) | |
267 for sn, tn, t in | |
268 ri.used_taxonomy_terms])) | |
269 else: | |
270 logging.info(" no render info") | |
222 | 271 |
223 if sub.errors: | 272 if sub.errors: |
224 logging.error(" errors: %s" % sub.errors) | 273 logging.error(" errors: %s" % sub.errors) |
225 | 274 |
226 logging.info(" assets: %s" % ', '.join(entry.assets)) | 275 def _showProcessingRecord(self, ctx, record_name, pattern, out_pattern): |
227 if entry.errors: | |
228 logging.error(" errors: %s" % entry.errors) | |
229 | |
230 record_cache = ctx.app.cache.getCache('proc') | 276 record_cache = ctx.app.cache.getCache('proc') |
231 if not record_cache.has(record_name): | 277 if not record_cache.has(record_name): |
232 return | 278 raise Exception("No record has been created for this output path. " |
279 "Did you bake there yet?") | |
233 | 280 |
234 # Show the pipeline record. | 281 # Show the pipeline record. |
235 record = ProcessorPipelineRecord.load( | 282 record = ProcessorPipelineRecord.load( |
236 record_cache.getCachePath(record_name)) | 283 record_cache.getCachePath(record_name)) |
237 logging.info("") | 284 logging.info("") |
242 logging.info("Status: success") | 289 logging.info("Status: success") |
243 else: | 290 else: |
244 logging.error("Status: failed") | 291 logging.error("Status: failed") |
245 logging.info("Entries:") | 292 logging.info("Entries:") |
246 for entry in record.entries: | 293 for entry in record.entries: |
247 if pattern and not fnmatch.fnmatch(entry.rel_input, pattern): | 294 rel_path = os.path.relpath(entry.path, ctx.app.root_dir) |
295 if pattern and not fnmatch.fnmatch(rel_path, pattern): | |
248 continue | 296 continue |
249 if out_pattern and not ( | 297 if out_pattern and not ( |
250 any([o for o in entry.rel_outputs | 298 any([o for o in entry.rel_outputs |
251 if fnmatch.fnmatch(o, out_pattern)])): | 299 if fnmatch.fnmatch(o, out_pattern)])): |
252 continue | 300 continue |
253 | 301 |
254 flags = [] | 302 flags = _get_flag_descriptions( |
255 if entry.flags & FLAG_PREPARED: | 303 entry.flags, |
256 flags.append('prepared') | 304 { |
257 if entry.flags & FLAG_PROCESSED: | 305 FLAG_PREPARED: 'prepared', |
258 flags.append('processed') | 306 FLAG_PROCESSED: 'processed', |
259 if entry.flags & FLAG_OVERRIDEN: | 307 FLAG_BYPASSED_STRUCTURED_PROCESSING: 'external'}) |
260 flags.append('overriden') | 308 |
261 if entry.flags & FLAG_BYPASSED_STRUCTURED_PROCESSING: | |
262 flags.append('external') | |
263 logger.info(" - ") | 309 logger.info(" - ") |
264 logger.info(" path: %s" % entry.rel_input) | 310 logger.info(" path: %s" % rel_path) |
265 logger.info(" out paths: %s" % entry.rel_outputs) | 311 logger.info(" out paths: %s" % entry.rel_outputs) |
266 logger.info(" flags: %s" % flags) | 312 logger.info(" flags: %s" % _join(flags)) |
267 logger.info(" proc tree: %s" % format_proc_tree( | 313 logger.info(" proc tree: %s" % _format_proc_tree( |
268 entry.proc_tree, 14*' ')) | 314 entry.proc_tree, 14*' ')) |
315 | |
269 if entry.errors: | 316 if entry.errors: |
270 logger.error(" errors: %s" % entry.errors) | 317 logger.error(" errors: %s" % entry.errors) |
271 | 318 |
272 | 319 |
273 def format_proc_tree(tree, margin='', level=0): | 320 def _join(items, sep=', ', text_if_none='none'): |
321 if items: | |
322 return sep.join(items) | |
323 return text_if_none | |
324 | |
325 | |
326 def _get_flag_descriptions(flags, descriptions): | |
327 res = [] | |
328 for k, v in descriptions.items(): | |
329 if flags & k: | |
330 res.append(v) | |
331 return res | |
332 | |
333 | |
334 def _format_proc_tree(tree, margin='', level=0): | |
274 name, children = tree | 335 name, children = tree |
275 res = '%s%s+ %s\n' % (margin if level > 0 else '', level * ' ', name) | 336 res = '%s%s+ %s\n' % (margin if level > 0 else '', level * ' ', name) |
276 if children: | 337 if children: |
277 for c in children: | 338 for c in children: |
278 res += format_proc_tree(c, margin, level + 1) | 339 res += _format_proc_tree(c, margin, level + 1) |
279 return res | 340 return res |
280 | 341 |
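
To round out the second bullet, below is a sketch of how a worker might persist its rendering info to a JSON cache so the "render first sub" job only has to return errors. The `json_cache.write` call, the `render_fn` callable, and the `page.cache_key` attribute are assumptions for illustration; only the render-info attribute names come from the diff above. The third bullet needs no sketch: `_merge_timers` in the diff accumulates each record's counters into `ctx.timers`, so page baking and the asset pipeline report into one merged timing table.

```python
import json

def save_render_info(json_cache, cache_key, render_info):
    # Hypothetical worker-side helper: serialize the per-pass render
    # info into the shared JSON cache instead of returning it to the
    # master process. Only JSON-friendly facts are kept.
    data = {}
    for p, ri in render_info.items():
        data[str(p)] = {
            'used_source_names': sorted(ri.used_source_names),
            'used_pagination': ri.used_pagination,
            'pagination_has_more': ri.pagination_has_more,
            'used_assets': ri.used_assets}
    json_cache.write(cache_key, json.dumps(data))

def render_first_sub(page, render_fn, json_cache):
    # With the render info persisted above, the job result shrinks to
    # a list of error messages -- empty on success.
    errors = []
    try:
        render_info = render_fn(page)
        save_render_info(json_cache, page.cache_key, render_info)
    except Exception as ex:
        errors.append(str(ex))
    return errors
```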