piecrust2: changeset 338:938be93215cb
bake: Improve render context and bake record, fix incremental bake bugs.
* Used sources and taxonomies are now stored on a per-render-pass basis.
This fixes bugs where sources/taxonomies were used for one pass, but that
pass is skipped on a later bake because its result is cached.
* Bake records are now created for all pages even when they're not baked.
Record collapsing is gone except for taxonomy index pages.
* Bake records now also have sub-entries in order to store information about
each sub-page, since some sub-pages could use sources/taxonomies differently
than others, or be missing from the output. This lets PieCrust handle
clean/dirty states on a sub-page level.
author   | Ludovic Chabant <ludovic@chabant.com>
date     | Mon, 06 Apr 2015 19:59:54 -0700
parents  | 49408002798e
children | 0ab712eab0fb
files    | piecrust/baking/baker.py piecrust/baking/records.py piecrust/baking/scheduler.py piecrust/baking/single.py piecrust/commands/builtin/baking.py piecrust/records.py piecrust/rendering.py piecrust/routing.py piecrust/serving.py piecrust/templating/jinjaengine.py
diffstat | 10 files changed, 353 insertions(+), 214 deletions(-)
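
Before the diffs, a minimal sketch (not part of the changeset) of how the reworked bake record fits together, using the classes introduced below; the source name, file paths and URIs are made-up example values:

    from piecrust.baking.records import (
            BakeRecordPageEntry, BakeRecordSubPageEntry, BakeRecordPassInfo)
    from piecrust.rendering import PASS_FORMATTING, PASS_RENDERING

    # One record entry per page...
    entry = BakeRecordPageEntry('posts', '2015/example-post.md',
                                '/site/posts/2015/example-post.md')

    # ...with one sub-entry per output sub-page.
    sub = BakeRecordSubPageEntry('blog/example-post/',
                                 '/output/blog/example-post/index.html')
    sub.flags |= BakeRecordSubPageEntry.FLAG_BAKED

    # Sources and taxonomy terms are recorded separately for each render pass,
    # so skipping one pass on a later bake doesn't lose what the other used.
    fmt_info = BakeRecordPassInfo()
    fmt_info.used_source_names.add('posts')
    layout_info = BakeRecordPassInfo()
    layout_info.used_taxonomy_terms.add(('posts', 'tags', 'piecrust'))
    sub.render_passes[PASS_FORMATTING] = fmt_info
    sub.render_passes[PASS_RENDERING] = layout_info

    entry.subs.append(sub)
    assert entry.was_any_sub_baked
    assert entry.getAllUsedSourceNames() == {'posts'}
    assert ('posts', 'tags', 'piecrust') in entry.getAllUsedTaxonomyTerms()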
--- a/piecrust/baking/baker.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/baking/baker.py Mon Apr 06 19:59:54 2015 -0700
@@ -103,7 +103,6 @@
         t = time.clock()
         record.current.bake_time = time.time()
         record.current.out_dir = self.out_dir
-        record.collapseRecords()
         record.saveCurrent(record_cache.getCachePath(record_name))
         logger.debug(format_timed(t, 'saved bake record',
                                   colored=False))
@@ -194,17 +193,6 @@
     def _bakeTaxonomies(self, record):
         logger.debug("Baking taxonomies")
 
-        class _TaxonomyTermsInfo(object):
-            def __init__(self):
-                self.dirty_terms = set()
-                self.all_terms = set()
-
-            def __str__(self):
-                return 'dirty:%s, all:%s' % (self.dirty_terms, self.all_terms)
-
-            def __repr__(self):
-                return 'dirty:%s, all:%s' % (self.dirty_terms, self.all_terms)
-
         # Let's see all the taxonomy terms for which we must bake a
         # listing page... first, pre-populate our big map of used terms.
         # For each source name, we have a list of taxonomies, and for each
@@ -222,62 +210,46 @@
         # Now see which ones are 'dirty' based on our bake record.
         logger.debug("Gathering dirty taxonomy terms")
         for prev_entry, cur_entry in record.transitions.values():
+            # Re-bake all taxonomy pages that include new or changed
+            # pages.
+            if cur_entry and cur_entry.was_any_sub_baked:
+                entries = [cur_entry]
+                if prev_entry:
+                    entries.append(prev_entry)
+
+                for tax in self.app.taxonomies:
+                    changed_terms = set()
+                    for e in entries:
+                        terms = e.config.get(tax.setting_name)
+                        if terms:
+                            if not tax.is_multiple:
+                                terms = [terms]
+                            changed_terms |= set(terms)
+
+                    if len(changed_terms) > 0:
+                        tt_info = buckets[cur_entry.source_name][tax.name]
+                        tt_info.dirty_terms |= changed_terms
+
+            # Remember all terms used.
             for tax in self.app.taxonomies:
-                # Re-bake all taxonomy pages that include new or changed
-                # pages.
-                if cur_entry and cur_entry.was_baked_successfully:
-                    if prev_entry and prev_entry.was_baked_successfully:
-                        # Entry was re-baked this time. Mark as dirty both the
-                        # old and new terms.
-                        changed_terms = []
-                        prev_terms = prev_entry.config.get(tax.setting_name)
-                        cur_terms = cur_entry.config.get(tax.setting_name)
-                        if tax.is_multiple:
-                            if prev_terms is not None:
-                                changed_terms += prev_terms
-                            if cur_terms is not None:
-                                changed_terms += cur_terms
-                        else:
-                            if prev_terms is not None:
-                                changed_terms.append(prev_terms)
-                            if cur_terms is not None:
-                                changed_terms.append(cur_terms)
-                    else:
-                        # Entry was not baked last time. Just mark as dirty
-                        # all the new terms.
-                        changed_terms = cur_entry.config.get(tax.setting_name)
-
-                    if changed_terms is not None:
-                        if not isinstance(changed_terms, list):
-                            changed_terms = [changed_terms]
-                        tt_info = buckets[cur_entry.source_name][tax.name]
-                        tt_info.dirty_terms |= set(changed_terms)
-
-                # Remember all terms used.
-                if cur_entry and cur_entry.was_baked_successfully:
+                if cur_entry and not cur_entry.was_overriden:
                     cur_terms = cur_entry.config.get(tax.setting_name)
-                    if cur_terms is not None:
-                        if not isinstance(cur_terms, list):
+                    if cur_terms:
+                        if not tax.is_multiple:
                             cur_terms = [cur_terms]
                         tt_info = buckets[cur_entry.source_name][tax.name]
                         tt_info.all_terms |= set(cur_terms)
-                elif (prev_entry and prev_entry.was_baked_successfully and
-                        cur_entry and not cur_entry.was_baked):
-                    prev_terms = prev_entry.config.get(tax.setting_name)
-                    if prev_terms is not None:
-                        if not isinstance(prev_terms, list):
-                            prev_terms = [prev_terms]
-                        tt_info = buckets[prev_entry.source_name][tax.name]
-                        tt_info.all_terms |= set(prev_terms)
 
         # Re-bake the combination pages for terms that are 'dirty'.
         known_combinations = set()
         logger.debug("Gathering dirty term combinations")
         for prev_entry, cur_entry in record.transitions.values():
-            if cur_entry and cur_entry.was_baked_successfully:
-                known_combinations |= cur_entry.used_taxonomy_terms
-            elif prev_entry:
-                known_combinations |= prev_entry.used_taxonomy_terms
+            if not cur_entry:
+                continue
+            used_taxonomy_terms = cur_entry.getAllUsedTaxonomyTerms()
+            for sn, tn, terms in used_taxonomy_terms:
+                if isinstance(terms, tuple):
+                    known_combinations.add((sn, tn, terms))
         for sn, tn, terms in known_combinations:
             tt_info = buckets[sn][tn]
             tt_info.all_terms.add(terms)
@@ -341,10 +313,7 @@
                     if tt in tt_info.all_terms:
                         logger.debug("Creating unbaked entry for taxonomy "
                                      "term '%s:%s'." % (tn, tt))
-                        entry = BakeRecordPageEntry(
-                                prev_entry.source_name, prev_entry.rel_path,
-                                prev_entry.path, prev_entry.taxonomy_info)
-                        record.addEntry(entry)
+                        record.collapseEntry(prev_entry)
                     else:
                         logger.debug("Taxonomy term '%s:%s' isn't used anymore."
                                      % (tn, tt))
@@ -471,14 +440,20 @@
                 entry.errors.append(str(ex))
                 ex = ex.__cause__
 
-        if entry.errors:
-            for e in entry.errors:
-                logger.error(e)
+        has_error = False
+        for e in entry.getAllErrors():
+            has_error = True
+            logger.error(e)
+        if has_error:
             return False
 
-        if entry.was_baked_successfully:
-            uri = entry.out_uris[0]
-            friendly_uri = uri if uri != '' else '[main page]'
+        if entry.was_any_sub_baked:
+            first_sub = entry.subs[0]
+
+            friendly_uri = first_sub.out_uri
+            if friendly_uri == '':
+                friendly_uri = '[main page]'
+
             friendly_count = ''
             if entry.num_subs > 1:
                 friendly_count = ' (%d pages)' % entry.num_subs
@@ -488,3 +463,14 @@
 
         return True
 
+
+class _TaxonomyTermsInfo(object):
+    def __init__(self):
+        self.dirty_terms = set()
+        self.all_terms = set()
+
+    def __str__(self):
+        return 'dirty:%s, all:%s' % (self.dirty_terms, self.all_terms)
+
+    def __repr__(self):
+        return 'dirty:%s, all:%s' % (self.dirty_terms, self.all_terms)
--- a/piecrust/baking/records.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/baking/records.py Mon Apr 06 19:59:54 2015 -0700
@@ -1,3 +1,4 @@
+import copy
 import os.path
 import logging
 from piecrust.records import Record, TransitionalRecord
@@ -19,7 +20,7 @@
 
 
 class BakeRecord(Record):
-    RECORD_VERSION = 11
+    RECORD_VERSION = 12
 
     def __init__(self):
         super(BakeRecord, self).__init__()
@@ -28,10 +29,43 @@
         self.success = True
 
 
-FLAG_NONE = 0
-FLAG_SOURCE_MODIFIED = 2**0
-FLAG_OVERRIDEN = 2**1
-FLAG_FORCED_BY_SOURCE = 2**2
+class BakeRecordPassInfo(object):
+    def __init__(self):
+        self.used_source_names = set()
+        self.used_taxonomy_terms = set()
+
+
+class BakeRecordSubPageEntry(object):
+    FLAG_NONE = 0
+    FLAG_BAKED = 2**0
+    FLAG_FORCED_BY_SOURCE = 2**1
+    FLAG_FORCED_BY_NO_PREVIOUS = 2**2
+    FLAG_FORCED_BY_PREVIOUS_ERRORS = 2**3
+    FLAG_FORMATTING_INVALIDATED = 2**4
+
+    def __init__(self, out_uri, out_path):
+        self.out_uri = out_uri
+        self.out_path = out_path
+        self.flags = self.FLAG_NONE
+        self.errors = []
+        self.render_passes = {}
+
+    @property
+    def was_clean(self):
+        return (self.flags & self.FLAG_BAKED) == 0 and len(self.errors) == 0
+
+    @property
+    def was_baked(self):
+        return (self.flags & self.FLAG_BAKED) != 0
+
+    @property
+    def was_baked_successfully(self):
+        return self.was_baked and len(self.errors) == 0
+
+    def collapseRenderPasses(self, other):
+        for p, pinfo in self.render_passes.items():
+            if p not in other.render_passes:
+                other.render_passes[p] = copy.deepcopy(pinfo)
 
 
 class BakeRecordPageEntry(object):
@@ -40,49 +74,73 @@
     The `taxonomy_info` attribute should be a tuple of the form:
     (taxonomy name, term, source name)
     """
+    FLAG_NONE = 0
+    FLAG_NEW = 2**0
+    FLAG_SOURCE_MODIFIED = 2**1
+    FLAG_OVERRIDEN = 2**2
+
     def __init__(self, source_name, rel_path, path, taxonomy_info=None):
         self.source_name = source_name
         self.rel_path = rel_path
         self.path = path
         self.taxonomy_info = taxonomy_info
-
-        self.flags = FLAG_NONE
+        self.flags = self.FLAG_NONE
         self.config = None
-        self.errors = []
-        self.out_uris = []
-        self.out_paths = []
-        self.clean_uris = []
-        self.clean_out_paths = []
-        self.used_source_names = set()
-        self.used_taxonomy_terms = set()
-        self.used_pagination_item_count = 0
+        self.subs = []
+        self.assets = []
 
     @property
     def path_mtime(self):
         return os.path.getmtime(self.path)
 
     @property
-    def was_baked(self):
-        return len(self.out_paths) > 0 or len(self.errors) > 0
-
-    @property
-    def was_baked_successfully(self):
-        return len(self.out_paths) > 0 and len(self.errors) == 0
+    def was_overriden(self):
+        return (self.flags & self.FLAG_OVERRIDEN) != 0
 
     @property
     def num_subs(self):
-        return len(self.out_paths)
+        return len(self.subs)
+
+    @property
+    def was_any_sub_baked(self):
+        for o in self.subs:
+            if o.was_baked:
+                return True
+        return False
+
+    def getSub(self, sub_index):
+        return self.subs[sub_index - 1]
+
+    def getAllErrors(self):
+        for o in self.subs:
+            yield from o.errors
+
+    def getAllUsedSourceNames(self):
+        res = set()
+        for o in self.subs:
+            for p, pinfo in o.render_passes.items():
+                res |= pinfo.used_source_names
+        return res
+
+    def getAllUsedTaxonomyTerms(self):
+        res = set()
+        for o in self.subs:
+            for p, pinfo in o.render_passes.items():
+                res |= pinfo.used_taxonomy_terms
+        return res
 
 
 class TransitionalBakeRecord(TransitionalRecord):
     def __init__(self, previous_path=None):
         super(TransitionalBakeRecord, self).__init__(BakeRecord,
                                                      previous_path)
+        self.dirty_source_names = set()
 
     def addEntry(self, entry):
         if (self.previous.bake_time and entry.path_mtime >=
                 self.previous.bake_time):
-            entry.flags |= FLAG_SOURCE_MODIFIED
+            entry.flags |= BakeRecordPageEntry.FLAG_SOURCE_MODIFIED
+            self.dirty_source_names.add(entry.source_name)
         super(TransitionalBakeRecord, self).addEntry(entry)
 
     def getTransitionKey(self, entry):
@@ -91,18 +149,13 @@
 
     def getOverrideEntry(self, factory, uri):
         for pair in self.transitions.values():
-            prev = pair[0]
             cur = pair[1]
             if (cur and
                     (cur.source_name != factory.source.name or
-                        cur.rel_path != factory.rel_path) and
-                    len(cur.out_uris) > 0 and cur.out_uris[0] == uri):
-                return cur
-            if (prev and
-                    (prev.source_name != factory.source.name or
-                        prev.rel_path != factory.rel_path) and
-                    len(prev.out_uris) > 0 and prev.out_uris[0] == uri):
-                return prev
+                        cur.rel_path != factory.rel_path)):
+                for o in cur.subs:
+                    if o.out_uri == uri:
+                        return cur
         return None
 
     def getPreviousEntry(self, source_name, rel_path, taxonomy_info=None):
@@ -112,32 +165,25 @@
                 return pair[0]
         return None
 
-    def getCurrentEntries(self, source_name):
-        return [e for e in self.current.entries
-                if e.source_name == source_name]
-
-    def collapseRecords(self):
-        for prev, cur in self.transitions.values():
-            if prev and cur and not cur.was_baked:
-                # This page wasn't baked, so the information from last
-                # time is still valid (we didn't get any information
-                # since we didn't bake).
-                cur.flags = prev.flags
-                if prev.config:
-                    cur.config = prev.config.copy()
-                cur.out_uris = list(prev.out_uris)
-                cur.out_paths = list(prev.out_paths)
-                cur.errors = list(prev.errors)
-                cur.used_source_names = set(prev.used_source_names)
-                cur.used_taxonomy_terms = set(prev.used_taxonomy_terms)
+    def collapseEntry(self, prev_entry):
+        cur_entry = copy.deepcopy(prev_entry)
+        cur_entry.flags = BakeRecordPageEntry.FLAG_NONE
+        for o in cur_entry.subs:
+            o.flags = BakeRecordSubPageEntry.FLAG_NONE
+        self.addEntry(cur_entry)
 
     def getDeletions(self):
         for prev, cur in self.transitions.values():
             if prev and not cur:
-                for p in prev.out_paths:
-                    yield (p, 'previous source file was removed')
-            elif prev and cur and cur.was_baked_successfully:
-                diff = set(prev.out_paths) - set(cur.out_paths)
+                for sub in prev.subs:
+                    yield (sub.out_path, 'previous source file was removed')
+            elif prev and cur:
+                prev_out_paths = [o.out_path for o in prev.subs]
+                cur_out_paths = [o.out_path for o in cur.subs]
+                diff = set(prev_out_paths) - set(cur_out_paths)
                 for p in diff:
                     yield (p, 'source file changed outputs')
 
+    def _onNewEntryAdded(self, entry):
+        entry.flags |= BakeRecordPageEntry.FLAG_NEW
+
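
A quick illustration of the clean/dirty states the new sub-entry flags above encode (hypothetical URI and output path; this only exercises the flag logic shown in the diff):

    from piecrust.baking.records import BakeRecordSubPageEntry

    sub = BakeRecordSubPageEntry('blog/2/', '/output/blog/2/index.html')
    assert sub.was_clean                       # nothing baked, no errors: up to date

    sub.flags |= BakeRecordSubPageEntry.FLAG_BAKED
    assert sub.was_baked and sub.was_baked_successfully

    sub.errors.append('layout error')          # a failed bake is neither clean nor successful
    assert sub.was_baked
    assert not sub.was_baked_successfully and not sub.was_clean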
--- a/piecrust/baking/scheduler.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/baking/scheduler.py Mon Apr 06 19:59:54 2015 -0700
@@ -84,13 +84,16 @@
     def _isJobReady(self, job):
         e = self.record.getPreviousEntry(
                 job.factory.source.name,
-                job.factory.rel_path)
+                job.factory.rel_path,
+                taxonomy_info=job.record_entry.taxonomy_info)
         if not e:
             return (True, None)
-        for sn, rp in e.used_source_names:
+        used_source_names = e.getAllUsedSourceNames()
+        for sn in used_source_names:
             if sn == job.factory.source.name:
                 continue
-            if any(filter(lambda j: j.factory.source.name == sn, self.jobs)):
+            if any(filter(lambda j: j.factory.source.name == sn,
+                          self.jobs)):
                 return (False, sn)
             if any(filter(lambda j: j.factory.source.name == sn,
                           self._active_jobs)):
--- a/piecrust/baking/single.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/baking/single.py Mon Apr 06 19:59:54 2015 -0700
@@ -4,13 +4,16 @@
 import logging
 import urllib.parse
 from piecrust.baking.records import (
-        FLAG_OVERRIDEN, FLAG_SOURCE_MODIFIED, FLAG_FORCED_BY_SOURCE)
-from piecrust.data.filters import (PaginationFilter, HasFilterClause,
+        BakeRecordPassInfo, BakeRecordPageEntry, BakeRecordSubPageEntry)
+from piecrust.data.filters import (
+        PaginationFilter, HasFilterClause,
         IsFilterClause, AndBooleanClause,
         page_value_accessor)
-from piecrust.rendering import (PageRenderingContext, render_page,
+from piecrust.rendering import (
+        PageRenderingContext, render_page,
         PASS_FORMATTING, PASS_RENDERING)
-from piecrust.sources.base import (PageFactory,
+from piecrust.sources.base import (
+        PageFactory,
         REALM_NAMES, REALM_USER, REALM_THEME)
 from piecrust.uriutil import split_uri
 
@@ -60,6 +63,8 @@
     def bake(self, factory, route, record_entry):
         bake_taxonomy_info = None
         route_metadata = dict(factory.metadata)
+
+        # Add taxonomy metadata for generating the URL if needed.
         if record_entry.taxonomy_info:
             tax_name, tax_term, tax_source_name = record_entry.taxonomy_info
             taxonomy = self.app.getTaxonomy(tax_name)
@@ -71,6 +76,12 @@
 
         page = factory.buildPage()
         uri = route.getUri(route_metadata, provider=page)
+
+        # See if this URL has been overriden by a previously baked page.
+        # If that page is from another realm (e.g. a user page vs. a theme
+        # page), we silently skip this page. If they're from the same realm,
+        # we don't allow overriding and raise an error (this is probably
+        # because of a misconfigured configuration that allows for ambiguous
+        # URLs between 2 routes or sources).
         override = self.record.getOverrideEntry(factory, uri)
         if override is not None:
             override_source = self.app.getSource(override.source_name)
@@ -83,51 +94,84 @@
             logger.debug("'%s' [%s] is overriden by '%s:%s'. Skipping" %
                          (factory.ref_spec, uri, override.source_name,
                           override.rel_path))
-            record_entry.flags |= FLAG_OVERRIDEN
+            record_entry.flags |= BakeRecordPageEntry.FLAG_OVERRIDEN
             return
 
+        # Setup the record entry.
+        record_entry.config = copy_public_page_config(page.config)
+
+        # Start baking the sub-pages.
         cur_sub = 1
         has_more_subs = True
         force_this = self.force
         invalidate_formatting = False
-        record_entry.config = copy_public_page_config(page.config)
         prev_record_entry = self.record.getPreviousEntry(
                 factory.source.name, factory.rel_path,
                 record_entry.taxonomy_info)
 
         logger.debug("Baking '%s'..." % uri)
 
-        # If the current page is known to use pages from other sources,
-        # see if any of those got baked, or are going to be baked for some
-        # reason. If so, we need to bake this one too.
-        # (this happens for instance with the main page of a blog).
-        if prev_record_entry and prev_record_entry.was_baked_successfully:
-            invalidated_render_passes = set()
-            used_src_names = list(prev_record_entry.used_source_names)
-            for src_name, rdr_pass in used_src_names:
-                entries = self.record.getCurrentEntries(src_name)
-                for e in entries:
-                    if e.was_baked or e.flags & FLAG_SOURCE_MODIFIED:
-                        invalidated_render_passes.add(rdr_pass)
-                        break
-            if len(invalidated_render_passes) > 0:
-                logger.debug("'%s' is known to use sources %s, at least one "
-                             "of which got baked. Will force bake this page. "
-                             % (uri, used_src_names))
-                record_entry.flags |= FLAG_FORCED_BY_SOURCE
-                force_this = True
-
-                if PASS_FORMATTING in invalidated_render_passes:
-                    logger.debug("Will invalidate cached formatting for '%s' "
-                                 "since sources were using during that pass."
-                                 % uri)
-                    invalidate_formatting = True
-
         while has_more_subs:
+            # Get the URL and path for this sub-page.
             sub_uri = route.getUri(route_metadata, sub_num=cur_sub,
                                    provider=page)
             out_path = self.getOutputPath(sub_uri)
 
+            # Create the sub-entry for the bake record.
+            record_sub_entry = BakeRecordSubPageEntry(sub_uri, out_path)
+            record_entry.subs.append(record_sub_entry)
+
+            # Find a corresponding sub-entry in the previous bake record.
+            prev_record_sub_entry = None
+            if prev_record_entry:
+                try:
+                    prev_record_sub_entry = prev_record_entry.getSub(cur_sub)
+                except IndexError:
+                    pass
+
+            # Figure out what to do with this page.
+            if (prev_record_sub_entry and
+                    (prev_record_sub_entry.was_baked_successfully or
+                        prev_record_sub_entry.was_clean)):
+                # If the current page is known to use pages from other sources,
+                # see if any of those got baked, or are going to be baked for
+                # some reason. If so, we need to bake this one too.
+                # (this happens for instance with the main page of a blog).
+                dirty_src_names, invalidated_render_passes = (
+                        self._getDirtySourceNamesAndRenderPasses(
+                            prev_record_sub_entry))
+                if len(invalidated_render_passes) > 0:
+                    logger.debug(
+                            "'%s' is known to use sources %s, which have "
+                            "items that got (re)baked. Will force bake this "
+                            "page. " % (uri, dirty_src_names))
+                    record_sub_entry.flags |= \
+                        BakeRecordSubPageEntry.FLAG_FORCED_BY_SOURCE
+                    force_this = True
+
+                    if PASS_FORMATTING in invalidated_render_passes:
+                        logger.debug(
+                                "Will invalidate cached formatting for '%s' "
+                                "since sources were using during that pass."
+                                % uri)
+                        invalidate_formatting = True
+            elif (prev_record_sub_entry and
+                    prev_record_sub_entry.errors):
+                # Previous bake failed. We'll have to bake it again.
+                logger.debug(
+                        "Previous record entry indicates baking failed for "
+                        "'%s'. Will bake it again." % uri)
+                record_sub_entry.flags |= \
+                    BakeRecordSubPageEntry.FLAG_FORCED_BY_PREVIOUS_ERRORS
+                force_this = True
+            elif not prev_record_sub_entry:
+                # No previous record. We'll have to bake it.
+                logger.debug("No previous record entry found for '%s'. Will "
+                             "force bake it." % uri)
+                record_sub_entry.flags |= \
+                    BakeRecordSubPageEntry.FLAG_FORCED_BY_NO_PREVIOUS
+                force_this = True
+
             # Check for up-to-date outputs.
             do_bake = True
             if not force_this:
@@ -143,18 +187,16 @@
             # If this page didn't bake because it's already up-to-date.
             # Keep trying for as many subs as we know this page has.
             if not do_bake:
-                if (prev_record_entry is not None and
-                        prev_record_entry.num_subs < cur_sub):
-                    logger.debug("")
+                prev_record_sub_entry.collapseRenderPasses(record_sub_entry)
+                record_sub_entry.flags = BakeRecordSubPageEntry.FLAG_NONE
+
+                if prev_record_entry.num_subs >= cur_sub + 1:
                     cur_sub += 1
                     has_more_subs = True
                     logger.debug("  %s is up to date, skipping to next "
                                  "sub-page." % out_path)
-                    record_entry.clean_uris.append(sub_uri)
-                    record_entry.clean_out_paths.append(out_path)
                     continue
 
-                # We don't know how many subs to expect... just skip.
                 logger.debug("  %s is up to date, skipping bake."
                              % out_path)
                 break
@@ -164,6 +206,8 @@
                 cache_key = sub_uri
                 self.app.env.rendered_segments_repository.invalidate(
                         cache_key)
+                record_sub_entry.flags |= \
+                    BakeRecordSubPageEntry.FLAG_FORMATTING_INVALIDATED
 
             logger.debug("  p%d -> %s" % (cur_sub, out_path))
             ctx, rp = self._bakeSingle(page, sub_uri, cur_sub, out_path,
@@ -175,6 +219,17 @@
                 raise BakingError("%s: error baking '%s'." %
                                   (page_rel_path, uri)) from ex
 
+            # Record what we did.
+            record_sub_entry.flags |= BakeRecordSubPageEntry.FLAG_BAKED
+            self.record.dirty_source_names.add(record_entry.source_name)
+            for p, pinfo in ctx.render_passes.items():
+                brpi = BakeRecordPassInfo()
+                brpi.used_source_names = set(pinfo.used_source_names)
+                brpi.used_taxonomy_terms = set(pinfo.used_taxonomy_terms)
+                record_sub_entry.render_passes[p] = brpi
+            if prev_record_sub_entry:
+                prev_record_sub_entry.collapseRenderPasses(record_sub_entry)
+
             # Copy page assets.
             if (cur_sub == 1 and self.copy_assets and
                     ctx.used_assets is not None):
@@ -190,21 +245,15 @@
                 if not os.path.isdir(out_assets_dir):
                     os.makedirs(out_assets_dir, 0o755)
                 for ap in ctx.used_assets:
-                    dest_ap = os.path.join(out_assets_dir, os.path.basename(ap))
+                    dest_ap = os.path.join(out_assets_dir,
+                                           os.path.basename(ap))
                     logger.debug("  %s -> %s" % (ap, dest_ap))
                     shutil.copy(ap, dest_ap)
+                    record_entry.assets.append(ap)
 
-            # Record what we did and figure out if we have more work.
-            record_entry.out_uris.append(sub_uri)
-            record_entry.out_paths.append(out_path)
-            record_entry.used_source_names |= ctx.used_source_names
-            record_entry.used_taxonomy_terms |= ctx.used_taxonomy_terms
-
+            # Figure out if we have more work.
             has_more_subs = False
             if ctx.used_pagination is not None:
-                if cur_sub == 1:
-                    record_entry.used_pagination_item_count = \
-                            ctx.used_pagination.total_item_count
                 if ctx.used_pagination.has_more:
                     cur_sub += 1
                     has_more_subs = True
@@ -227,3 +276,15 @@
 
         return ctx, rp
 
+    def _getDirtySourceNamesAndRenderPasses(self, record_sub_entry):
+        dirty_src_names = set()
+        invalidated_render_passes = set()
+        for p, pinfo in record_sub_entry.render_passes.items():
+            for src_name in pinfo.used_source_names:
+                is_dirty = (src_name in self.record.dirty_source_names)
+                if is_dirty:
+                    invalidated_render_passes.add(p)
+                    dirty_src_names.add(src_name)
+                    break
+        return dirty_src_names, invalidated_render_passes
+
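
To make the incremental-bake decision above concrete, here is a small self-contained sketch (made-up source names and entries, not code from the changeset) of why per-pass tracking lets a page be re-baked for its layout without throwing away its cached formatting:

    from piecrust.baking.records import BakeRecordPassInfo, BakeRecordSubPageEntry
    from piecrust.rendering import PASS_FORMATTING, PASS_RENDERING

    # Previous bake: the blog's main page used the 'posts' source only while
    # rendering its layout, not while formatting its own text segments.
    prev_sub = BakeRecordSubPageEntry('', '/output/index.html')
    prev_sub.render_passes[PASS_FORMATTING] = BakeRecordPassInfo()
    layout_info = BakeRecordPassInfo()
    layout_info.used_source_names.add('posts')
    prev_sub.render_passes[PASS_RENDERING] = layout_info

    # This bake: a post changed, so the 'posts' source is dirty.
    dirty_source_names = {'posts'}
    invalidated = set(p for p, pinfo in prev_sub.render_passes.items()
                      if dirty_source_names & pinfo.used_source_names)

    force_this = bool(invalidated)                          # True: re-bake the page
    invalidate_formatting = PASS_FORMATTING in invalidated  # False: keep cached segments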
--- a/piecrust/commands/builtin/baking.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/commands/builtin/baking.py Mon Apr 06 19:59:54 2015 -0700
@@ -6,10 +6,7 @@
 import datetime
 from piecrust.baking.baker import Baker
 from piecrust.baking.records import (
-        BakeRecord,
-        FLAG_OVERRIDEN as BAKE_FLAG_OVERRIDEN,
-        FLAG_SOURCE_MODIFIED as BAKE_FLAG_SOURCE_MODIFIED,
-        FLAG_FORCED_BY_SOURCE as BAKE_FLAG_FORCED_BY_SOURCE)
+        BakeRecord, BakeRecordPageEntry, BakeRecordSubPageEntry)
 from piecrust.chefutil import format_timed
 from piecrust.commands.base import ChefCommand
 from piecrust.processing.base import ProcessorPipeline
@@ -154,37 +151,54 @@
                 continue
 
             flags = []
-            if entry.flags & BAKE_FLAG_OVERRIDEN:
+            if entry.flags & BakeRecordPageEntry.FLAG_OVERRIDEN:
                 flags.append('overriden')
-            if entry.flags & BAKE_FLAG_SOURCE_MODIFIED:
-                flags.append('overriden')
-            if entry.flags & BAKE_FLAG_FORCED_BY_SOURCE:
-                flags.append('forced by source')
 
             passes = {PASS_RENDERING: 'render', PASS_FORMATTING: 'format'}
-            used_srcs = ['%s (%s)' % (s[0], passes[s[1]])
-                         for s in entry.used_source_names]
 
             logging.info(" - ")
             logging.info(" path: %s" % entry.rel_path)
             logging.info(" spec: %s:%s" % (entry.source_name,
                                            entry.rel_path))
             if entry.taxonomy_info:
-                logging.info(" taxonomy: %s:%s for %s" %
-                             entry.taxonomy_info)
+                tn, t, sn = entry.taxonomy_info
+                logging.info(" taxonomy: %s (%s:%s)" %
+                             (t, sn, tn))
             else:
                 logging.info(" taxonomy: <none>")
             logging.info(" flags: %s" % ', '.join(flags))
             logging.info(" config: %s" % entry.config)
-            logging.info(" out URLs: %s" % entry.out_uris)
-            logging.info(" out paths: %s" % [os.path.relpath(p, out_dir)
-                                             for p in entry.out_paths])
-            logging.info(" clean URLs:%s" % entry.clean_uris)
-            logging.info(" used srcs: %s" % used_srcs)
-            logging.info(" used terms:%s" % entry.used_taxonomy_terms)
-            logging.info(" used pgn: %d" % entry.used_pagination_item_count)
-            if entry.errors:
-                logging.error(" errors: %s" % entry.errors)
+
+            logging.info(" %d sub-pages:" % len(entry.subs))
+            for sub in entry.subs:
+                logging.info(" - ")
+                logging.info("   URL: %s" % sub.out_uri)
+                logging.info("   path: %s" % os.path.relpath(sub.out_path,
+                                                             out_dir))
+                logging.info("   baked?: %s" % sub.was_baked)
+
+                sub_flags = []
+                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_SOURCE:
+                    sub_flags.append('forced by source')
+                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_NO_PREVIOUS:
+                    sub_flags.append('forced by missing previous record entry')
+                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_PREVIOUS_ERRORS:
+                    sub_flags.append('forced by previous errors')
+                logging.info("   flags: %s" % ', '.join(sub_flags))
+
+                for p, pi in sub.render_passes.items():
+                    logging.info("   %s pass:" % passes[p])
+                    logging.info("     used srcs: %s" %
+                                 ', '.join(pi.used_source_names))
+                    logging.info("     used terms: %s" %
+                                 ', '.join(
+                                        ['%s (%s:%s)' % (t, sn, tn)
+                                         for sn, tn, t in pi.used_taxonomy_terms]))
+
+                if sub.errors:
+                    logging.error("   errors: %s" % sub.errors)
+
+            logging.info(" assets: %s" % ', '.join(entry.assets))
 
         record_cache = ctx.app.cache.getCache('proc')
         if not record_cache.has(record_name):
--- a/piecrust/records.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/records.py Mon Apr 06 19:59:54 2015 -0700
@@ -118,6 +118,7 @@
         if te is None:
             logger.debug("Adding new record entry: %s" % key)
             self.transitions[key] = (None, entry)
+            self._onNewEntryAdded(entry)
             return
 
         if te[1] is not None:
@@ -126,3 +127,6 @@
         logger.debug("Setting current record entry: %s" % key)
         self.transitions[key] = (te[0], entry)
 
+    def _onNewEntryAdded(self, entry):
+        pass
+
--- a/piecrust/rendering.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/rendering.py Mon Apr 06 19:59:54 2015 -0700
@@ -32,7 +32,6 @@
         self.num = num
         self.data = None
         self.content = None
-        self.execution_info = None
 
     @property
     def app(self):
@@ -44,6 +43,15 @@
 PASS_RENDERING = 2
 
 
+RENDER_PASSES = [PASS_FORMATTING, PASS_RENDERING]
+
+
+class RenderPassInfo(object):
+    def __init__(self):
+        self.used_source_names = set()
+        self.used_taxonomy_terms = set()
+
+
 class PageRenderingContext(object):
     def __init__(self, page, uri, page_num=1, force_render=False):
         self.page = page
@@ -53,12 +61,11 @@
         self.pagination_source = None
         self.pagination_filter = None
         self.custom_data = None
-        self.use_cache = False
-        self.used_assets = None
+        self._current_pass = PASS_NONE
+
+        self.render_passes = {}
         self.used_pagination = None
-        self.used_source_names = set()
-        self.used_taxonomy_terms = set()
-        self.current_pass = PASS_NONE
+        self.used_assets = None
 
     @property
     def app(self):
@@ -68,15 +75,27 @@
     def source_metadata(self):
         return self.page.source_metadata
 
+    @property
+    def current_pass_info(self):
+        return self.render_passes.get(self._current_pass)
+
+    def setCurrentPass(self, rdr_pass):
+        if rdr_pass != PASS_NONE:
+            self.render_passes.setdefault(rdr_pass, RenderPassInfo())
+        self._current_pass = rdr_pass
+
     def setPagination(self, paginator):
+        self._raiseIfNoCurrentPass()
         if self.used_pagination is not None:
             raise Exception("Pagination has already been used.")
         self.used_pagination = paginator
         self.addUsedSource(paginator._source)
 
     def addUsedSource(self, source):
+        self._raiseIfNoCurrentPass()
         if isinstance(source, PageSource):
-            self.used_source_names.add((source.name, self.current_pass))
+            pass_info = self.render_passes[self._current_pass]
+            pass_info.used_source_names.add(source.name)
 
     def setTaxonomyFilter(self, taxonomy, term_value):
         is_combination = isinstance(term_value, tuple)
@@ -98,6 +117,10 @@
                 taxonomy.term_name: term_value,
                 'is_multiple_%s' % taxonomy.term_name: is_combination}
 
+    def _raiseIfNoCurrentPass(self):
+        if self._current_pass == PASS_NONE:
+            raise Exception("No rendering pass is currently active.")
+
 
 def render_page(ctx):
     eis = ctx.app.env.exec_info_stack
@@ -114,7 +137,7 @@
             page_data.update(ctx.custom_data)
 
         # Render content segments.
-        ctx.current_pass = PASS_FORMATTING
+        ctx.setCurrentPass(PASS_FORMATTING)
         repo = ctx.app.env.rendered_segments_repository
         if repo and not ctx.force_render:
             cache_key = ctx.uri
@@ -127,7 +150,7 @@
             contents = _do_render_page_segments(page, page_data)
 
         # Render layout.
-        ctx.current_pass = PASS_RENDERING
+        ctx.setCurrentPass(PASS_RENDERING)
         layout_name = page.config.get('layout')
         if layout_name is None:
             layout_name = page.source.config.get('default_layout', 'default')
@@ -141,10 +164,9 @@
         rp = RenderedPage(page, ctx.uri, ctx.page_num)
         rp.data = page_data
         rp.content = output
-        rp.execution_info = eis.current_page_info
         return rp
     finally:
-        ctx.current_pass = PASS_NONE
+        ctx.setCurrentPass(PASS_NONE)
         eis.popPage()
 
 
@@ -162,13 +184,13 @@
 def _do_render_page_segments_from_ctx(ctx):
     eis = ctx.app.env.exec_info_stack
     eis.pushPage(ctx.page, ctx)
-    ctx.current_pass = PASS_FORMATTING
+    ctx.setCurrentPass(PASS_FORMATTING)
     try:
         data_ctx = DataBuildingContext(ctx.page, ctx.uri, ctx.page_num)
         page_data = build_page_data(data_ctx)
         return _do_render_page_segments(ctx.page, page_data)
     finally:
-        ctx.current_pass = PASS_NONE
+        ctx.setCurrentPass(PASS_NONE)
         eis.popPage()
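
The render context change boils down to the following pattern; this is a simplified stand-in rather than the real PageRenderingContext, just to show how usage gets bucketed by whichever pass is active:

    PASS_NONE, PASS_FORMATTING, PASS_RENDERING = 0, 1, 2

    class _PassInfo(object):
        def __init__(self):
            self.used_source_names = set()

    class _Ctx(object):
        def __init__(self):
            self.render_passes = {}
            self._current_pass = PASS_NONE

        def setCurrentPass(self, rdr_pass):
            if rdr_pass != PASS_NONE:
                self.render_passes.setdefault(rdr_pass, _PassInfo())
            self._current_pass = rdr_pass

        def addUsedSource(self, source_name):
            if self._current_pass == PASS_NONE:
                raise Exception("No rendering pass is currently active.")
            self.render_passes[self._current_pass].used_source_names.add(source_name)

    ctx = _Ctx()
    ctx.setCurrentPass(PASS_FORMATTING)
    ctx.addUsedSource('posts')            # recorded under the formatting pass
    ctx.setCurrentPass(PASS_RENDERING)
    ctx.addUsedSource('pages')            # recorded under the rendering pass
    ctx.setCurrentPass(PASS_NONE)
    assert ctx.render_passes[PASS_FORMATTING].used_source_names == {'posts'}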
--- a/piecrust/routing.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/routing.py Mon Apr 06 19:59:54 2015 -0700
@@ -239,7 +239,8 @@
         else:
             registered_values = tuple(values)
         eis = self.app.env.exec_info_stack
-        eis.current_page_info.render_ctx.used_taxonomy_terms.add(
+        cpi = eis.current_page_info.render_ctx.current_pass_info
+        cpi.used_taxonomy_terms.add(
                 (self.source_name, self.taxonomy_name, registered_values))
 
--- a/piecrust/serving.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/serving.py Mon Apr 06 19:59:54 2015 -0700
@@ -285,7 +285,8 @@
         if entry is None:
             entry = ServeRecordPageEntry(req_path, page_num)
             self._page_record.addEntry(entry)
-        entry.used_source_names = set(render_ctx.used_source_names)
+        rdr_pass = render_ctx.current_pass_info
+        entry.used_source_names = set(rdr_pass.used_source_names)
 
         # Profiling.
         if app.config.get('site/show_debug_info'):
--- a/piecrust/templating/jinjaengine.py Sat Apr 04 07:55:49 2015 -0700
+++ b/piecrust/templating/jinjaengine.py Mon Apr 06 19:59:54 2015 -0700
@@ -386,24 +386,25 @@
 
         exc_stack = self.environment.app.env.exec_info_stack
         render_ctx = exc_stack.current_page_info.render_ctx
+        rdr_pass = render_ctx.current_pass_info
 
         # try to load the block from the cache
         # if there is no fragment in the cache, render it and store
         # it in the cache.
         pair = self.environment.piecrust_cache.get(key)
         if pair is not None:
-            render_ctx.used_source_names.update(pair[1])
+            rdr_pass.used_source_names.update(pair[1])
            return pair[0]
 
         with self._lock:
             pair = self.environment.piecrust_cache.get(key)
             if pair is not None:
-                render_ctx.used_source_names.update(pair[1])
+                rdr_pass.used_source_names.update(pair[1])
                 return pair[0]
 
-            prev_used = render_ctx.used_source_names.copy()
+            prev_used = rdr_pass.used_source_names.copy()
             rv = caller()
-            after_used = render_ctx.used_source_names.copy()
+            after_used = rdr_pass.used_source_names.copy()
             used_delta = after_used.difference(prev_used)
             self.environment.piecrust_cache[key] = (rv, used_delta)
             return rv