comparison piecrust/commands/builtin/baking.py @ 698:33ab9badfd7a

render: Change how we store render pass info. Previously we used a dictionary with integers as keys, which doesn't serialize well to JSON. It is now replaced with a fixed-length array whose items default to `None`.
author Ludovic Chabant <ludovic@chabant.com>
date Wed, 23 Mar 2016 16:39:22 -0700
parents c11a4339fccb
children 066d6156525c
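
To illustrate the problem this changeset fixes, here is a minimal sketch; the pass constants and the record payloads below are simplified stand-ins, not the actual PieCrust structures. JSON objects only allow string keys, so a dict keyed by pass numbers comes back with string keys after a serialize/deserialize round trip, while a fixed-length list indexed by pass number, with `None` in unused slots, round-trips cleanly.

    import json

    # Simplified stand-ins for the render pass constants (assumed values).
    PASS_FORMATTING = 0
    PASS_RENDERING = 1

    # Old approach: a dict keyed by pass number.
    render_info = {PASS_FORMATTING: {'used_assets': True}}
    roundtripped = json.loads(json.dumps(render_info))
    print(list(roundtripped.keys()))      # ['0'] -- the integer key came back as a string

    # New approach: a fixed-length list with one slot per pass, None by default.
    render_info = [None, None]
    render_info[PASS_FORMATTING] = {'used_assets': True}
    roundtripped = json.loads(json.dumps(render_info))
    print(roundtripped[PASS_FORMATTING])  # {'used_assets': True}
    print(roundtripped[PASS_RENDERING])   # None

This is why the diff below swaps `sub.render_info.items()` for `enumerate(sub.render_info)` and adds an explicit check for empty (`None`) slots.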
--- a/piecrust/commands/builtin/baking.py  697:9e5393fcfab2
+++ b/piecrust/commands/builtin/baking.py  698:33ab9badfd7a
@@ -232,12 +232,14 @@
 
     def _showBakeRecord(self, ctx, record_name, pattern, out_pattern):
         # Show the bake record.
         record_cache = ctx.app.cache.getCache('baker')
         if not record_cache.has(record_name):
-            raise Exception("No record has been created for this output path. "
-                            "Did you bake there yet?")
+            logger.warning(
+                "No page bake record has been created for this output "
+                "path.")
+            return None
 
         record = BakeRecord.load(record_cache.getCachePath(record_name))
         logging.info("Bake record for: %s" % record.out_dir)
         logging.info("From: %s" % record_name)
         logging.info("Last baked: %s" %
@@ -298,44 +300,47 @@
                 logging.info(" URL: %s" % sub.out_uri)
                 logging.info(" path: %s" % os.path.relpath(
                     sub.out_path, record.out_dir))
                 logging.info(" flags: %s" % _join(sub_flags))
 
-                if sub.render_info:
-                    pass_names = {
-                        PASS_FORMATTING: 'formatting pass',
-                        PASS_RENDERING: 'rendering pass'}
-                    for p, ri in sub.render_info.items():
-                        logging.info(" - %s" % pass_names[p])
-                        logging.info(" used sources: %s" %
-                                     _join(ri.used_source_names))
-                        pgn_info = 'no'
-                        if ri.used_pagination:
-                            pgn_info = 'yes'
-                        if ri.pagination_has_more:
-                            pgn_info += ', has more'
-                        logging.info(" used pagination: %s", pgn_info)
-                        logging.info(" used assets: %s",
-                                     'yes' if ri.used_assets else 'no')
-                        logging.info(" used terms: %s" %
-                                     _join(
-                                         ['%s=%s (%s)' % (tn, t, sn)
-                                          for sn, tn, t in
-                                          ri.used_taxonomy_terms]))
-                else:
-                    logging.info(" no render info")
+                pass_names = {
+                    PASS_FORMATTING: 'formatting pass',
+                    PASS_RENDERING: 'rendering pass'}
+                for p, ri in enumerate(sub.render_info):
+                    logging.info(" - %s" % pass_names[p])
+                    if not ri:
+                        logging.info(" no info")
+                        continue
+
+                    logging.info(" used sources: %s" %
+                                 _join(ri.used_source_names))
+                    pgn_info = 'no'
+                    if ri.used_pagination:
+                        pgn_info = 'yes'
+                    if ri.pagination_has_more:
+                        pgn_info += ', has more'
+                    logging.info(" used pagination: %s", pgn_info)
+                    logging.info(" used assets: %s",
+                                 'yes' if ri.used_assets else 'no')
+                    logging.info(" used terms: %s" %
+                                 _join(
+                                     ['%s=%s (%s)' % (tn, t, sn)
+                                      for sn, tn, t in
+                                      ri.used_taxonomy_terms]))
 
                 if sub.errors:
                     logging.error(" errors: %s" % sub.errors)
 
         return record
 
     def _showProcessingRecord(self, ctx, record_name, pattern, out_pattern):
         record_cache = ctx.app.cache.getCache('proc')
         if not record_cache.has(record_name):
-            raise Exception("No record has been created for this output path. "
-                            "Did you bake there yet?")
+            logger.warning(
+                "No asset processing record has been created for this "
+                "output path.")
+            return None
 
         # Show the pipeline record.
         record = ProcessorPipelineRecord.load(
             record_cache.getCachePath(record_name))
         logging.info("")