Mercurial > piecrust2
comparison piecrust/commands/builtin/baking.py @ 334:b034f6f15e22
bake: Several taxonomy-related bug fixes for incorrect incremental bakes.
* Improve how the baker processes taxonomy terms and figures out what needs
to be re-baked or not.
* Create bake entries for clean taxonomy terms so they're not deleted by an
incremental bake.
* Add more information to bake records.
* Slugifying taxonomy terms is now done by the route in one place.
* Fix a bug where the cache key for invalidating rendered segments was not
computed the same way as when the caching was done.
* Fix how term combinations are passed around, rendered, printed, parsed, etc.
(TODO: more work needed in the routing functions)
* Expose to the template whether a taxonomy term is a combination or not.
* Display term combinations better in the built-in theme.
* Rename `route.taxonomy` to `route.taxonomy_name` to prevent confusion.
* Add options to show bake records for previous bakes.
author | Ludovic Chabant <ludovic@chabant.com> |
---|---|
date | Fri, 03 Apr 2015 10:59:50 -0700 |
parents | f0fc2a9d3191 |
children | 938be93215cb |
comparison
equal
deleted
inserted
replaced
333:91b07f9efdc1 | 334:b034f6f15e22 |
---|---|
3 import logging | 3 import logging |
4 import hashlib | 4 import hashlib |
5 import fnmatch | 5 import fnmatch |
6 import datetime | 6 import datetime |
7 from piecrust.baking.baker import Baker | 7 from piecrust.baking.baker import Baker |
8 from piecrust.baking.records import BakeRecord | 8 from piecrust.baking.records import ( |
9 BakeRecord, | |
10 FLAG_OVERRIDEN as BAKE_FLAG_OVERRIDEN, | |
11 FLAG_SOURCE_MODIFIED as BAKE_FLAG_SOURCE_MODIFIED, | |
12 FLAG_FORCED_BY_SOURCE as BAKE_FLAG_FORCED_BY_SOURCE) | |
9 from piecrust.chefutil import format_timed | 13 from piecrust.chefutil import format_timed |
10 from piecrust.commands.base import ChefCommand | 14 from piecrust.commands.base import ChefCommand |
11 from piecrust.processing.base import ProcessorPipeline | 15 from piecrust.processing.base import ProcessorPipeline |
12 from piecrust.processing.records import ( | 16 from piecrust.processing.records import ( |
13 ProcessorPipelineRecord, | 17 ProcessorPipelineRecord, |
14 FLAG_PREPARED, FLAG_PROCESSED, FLAG_OVERRIDEN, | 18 FLAG_PREPARED, FLAG_PROCESSED, FLAG_OVERRIDEN, |
15 FLAG_BYPASSED_STRUCTURED_PROCESSING) | 19 FLAG_BYPASSED_STRUCTURED_PROCESSING) |
20 from piecrust.rendering import PASS_FORMATTING, PASS_RENDERING | |
16 | 21 |
17 | 22 |
18 logger = logging.getLogger(__name__) | 23 logger = logging.getLogger(__name__) |
19 | 24 |
20 | 25 |
102 "of entries to show.") | 107 "of entries to show.") |
103 parser.add_argument( | 108 parser.add_argument( |
104 '-t', '--out', | 109 '-t', '--out', |
105 help="A pattern that will be used to filter the output path " | 110 help="A pattern that will be used to filter the output path " |
106 "of entries to show.") | 111 "of entries to show.") |
112 parser.add_argument( | |
113 '--last', | |
114 type=int, | |
115 default=0, | |
116 help="Show the last Nth bake record.") | |
107 | 117 |
108 def run(self, ctx): | 118 def run(self, ctx): |
109 out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter') | 119 out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter') |
110 record_name = (hashlib.md5(out_dir.encode('utf8')).hexdigest() + | 120 record_id = hashlib.md5(out_dir.encode('utf8')).hexdigest() |
111 '.record') | 121 suffix = '' if ctx.args.last == 0 else '.%d' % ctx.args.last |
122 record_name = '%s%s.record' % (record_id, suffix) | |
112 | 123 |
113 pattern = None | 124 pattern = None |
114 if ctx.args.path: | 125 if ctx.args.path: |
115 pattern = '*%s*' % ctx.args.path.strip('*') | 126 pattern = '*%s*' % ctx.args.path.strip('*') |
116 | 127 |
124 "Did you bake there yet?") | 135 "Did you bake there yet?") |
125 | 136 |
126 # Show the bake record. | 137 # Show the bake record. |
127 record = BakeRecord.load(record_cache.getCachePath(record_name)) | 138 record = BakeRecord.load(record_cache.getCachePath(record_name)) |
128 logging.info("Bake record for: %s" % record.out_dir) | 139 logging.info("Bake record for: %s" % record.out_dir) |
140 logging.info("From: %s" % record_name) | |
129 logging.info("Last baked: %s" % | 141 logging.info("Last baked: %s" % |
130 datetime.datetime.fromtimestamp(record.bake_time)) | 142 datetime.datetime.fromtimestamp(record.bake_time)) |
131 if record.success: | 143 if record.success: |
132 logging.info("Status: success") | 144 logging.info("Status: success") |
133 else: | 145 else: |
139 if out_pattern and not ( | 151 if out_pattern and not ( |
140 any([o for o in entry.out_paths | 152 any([o for o in entry.out_paths |
141 if fnmatch.fnmatch(o, out_pattern)])): | 153 if fnmatch.fnmatch(o, out_pattern)])): |
142 continue | 154 continue |
143 | 155 |
156 flags = [] | |
157 if entry.flags & BAKE_FLAG_OVERRIDEN: | |
158 flags.append('overriden') | |
159 if entry.flags & BAKE_FLAG_SOURCE_MODIFIED: | |
160 flags.append('overriden') | |
161 if entry.flags & BAKE_FLAG_FORCED_BY_SOURCE: | |
162 flags.append('forced by source') | |
163 | |
164 passes = {PASS_RENDERING: 'render', PASS_FORMATTING: 'format'} | |
165 used_srcs = ['%s (%s)' % (s[0], passes[s[1]]) | |
166 for s in entry.used_source_names] | |
167 | |
144 logging.info(" - ") | 168 logging.info(" - ") |
145 logging.info(" path: %s" % entry.rel_path) | 169 logging.info(" path: %s" % entry.rel_path) |
146 logging.info(" spec: %s:%s" % (entry.source_name, | 170 logging.info(" spec: %s:%s" % (entry.source_name, |
147 entry.rel_path)) | 171 entry.rel_path)) |
148 logging.info(" taxonomy: %s:%s" % (entry.taxonomy_name, | 172 if entry.taxonomy_info: |
149 entry.taxonomy_term)) | 173 logging.info(" taxonomy: %s:%s for %s" % |
174 entry.taxonomy_info) | |
175 else: | |
176 logging.info(" taxonomy: <none>") | |
177 logging.info(" flags: %s" % ', '.join(flags)) | |
150 logging.info(" config: %s" % entry.config) | 178 logging.info(" config: %s" % entry.config) |
151 logging.info(" out URLs: %s" % entry.out_uris) | 179 logging.info(" out URLs: %s" % entry.out_uris) |
152 logging.info(" out paths: %s" % [os.path.relpath(p, out_dir) | 180 logging.info(" out paths: %s" % [os.path.relpath(p, out_dir) |
153 for p in entry.out_paths]) | 181 for p in entry.out_paths]) |
154 logging.info(" used srcs: %s" % entry.used_source_names) | 182 logging.info(" clean URLs:%s" % entry.clean_uris) |
183 logging.info(" used srcs: %s" % used_srcs) | |
184 logging.info(" used terms:%s" % entry.used_taxonomy_terms) | |
185 logging.info(" used pgn: %d" % entry.used_pagination_item_count) | |
155 if entry.errors: | 186 if entry.errors: |
156 logging.error(" errors: %s" % entry.errors) | 187 logging.error(" errors: %s" % entry.errors) |
157 | 188 |
158 record_cache = ctx.app.cache.getCache('proc') | 189 record_cache = ctx.app.cache.getCache('proc') |
159 if not record_cache.has(record_name): | 190 if not record_cache.has(record_name): |