comparison piecrust/pipelines/page.py @ 991:1857dbd4580f

bake: Fix bugs introduced by bake optimizations, of course. - Make the execution stats JSON-serializable. - Re-add ability to differentiate between sources used during segment rendering and during layout rendering. Fixes problems with cache invalidation of pages that use other sources. - Make taxonomy-related stuff JSON-serializable.
author Ludovic Chabant <ludovic@chabant.com>
date Mon, 20 Nov 2017 23:06:47 -0800
parents 8adc27285d93
children fa489c5e829e
comparison
equal deleted inserted replaced
990:22cf13b86cc3 991:1857dbd4580f
1 import copy
1 import time 2 import time
2 import logging 3 import logging
3 from piecrust.pipelines.base import ( 4 from piecrust.pipelines.base import (
4 ContentPipeline, create_job, content_item_from_job) 5 ContentPipeline, create_job, content_item_from_job)
5 from piecrust.pipelines._pagebaker import PageBaker, get_output_path 6 from piecrust.pipelines._pagebaker import PageBaker, get_output_path
46 cur_entry = record_fac(item.spec) 47 cur_entry = record_fac(item.spec)
47 cur_entry.config = page.config.getAll() 48 cur_entry.config = page.config.getAll()
48 cur_entry.route_params = item.metadata['route_params'] 49 cur_entry.route_params = item.metadata['route_params']
49 cur_entry.timestamp = page.datetime.timestamp() 50 cur_entry.timestamp = page.datetime.timestamp()
50 51
52 if page.was_modified:
53 cur_entry.flags |= PagePipelineRecordEntry.FLAG_SOURCE_MODIFIED
51 if page.config.get(self._draft_setting): 54 if page.config.get(self._draft_setting):
52 cur_entry.flags |= PagePipelineRecordEntry.FLAG_IS_DRAFT 55 cur_entry.flags |= PagePipelineRecordEntry.FLAG_IS_DRAFT
53 56
54 yield cur_entry 57 yield cur_entry
55 58
83 if cur.flags & PagePipelineRecordEntry.FLAG_IS_DRAFT: 86 if cur.flags & PagePipelineRecordEntry.FLAG_IS_DRAFT:
84 continue 87 continue
85 88
86 # Skip pages that are known to use other sources... we'll 89 # Skip pages that are known to use other sources... we'll
87 # schedule them in the second pass. 90 # schedule them in the second pass.
88 if prev and prev.getAllUsedSourceNames(): 91 if prev:
89 continue 92 usn1, usn2 = prev.getAllUsedSourceNames()
93 if usn1 or usn2:
94 continue
90 95
91 # Check if this item has been overridden by a previous pipeline 96 # Check if this item has been overridden by a previous pipeline
92 # run... for instance, we could be the pipeline for a "theme pages" 97 # run... for instance, we could be the pipeline for a "theme pages"
93 # source, and some of our pages have been overridden by a user 98 # source, and some of our pages have been overridden by a user
94 # page that writes out to the same URL. 99 # page that writes out to the same URL.
95 uri = uri_getter(cur.route_params) 100 uri = uri_getter(cur.route_params)
96 path = get_output_path(app, out_dir, uri, pretty_urls) 101 path = get_output_path(app, out_dir, uri, pretty_urls)
97
98 override = used_paths.get(path) 102 override = used_paths.get(path)
99 if override is not None: 103 if override is not None:
100 override_source_name, override_entry = override 104 override_source_name, override_entry = override
101 override_source = app.getSource(override_source_name) 105 override_source = app.getSource(override_source_name)
102 if override_source.config['realm'] == \ 106 if override_source.config['realm'] == \
141 jobs = [] 145 jobs = []
142 pass_num = ctx.pass_num 146 pass_num = ctx.pass_num
143 history = ctx.record_histories.getHistory(ctx.record_name).copy() 147 history = ctx.record_histories.getHistory(ctx.record_name).copy()
144 history.build() 148 history.build()
145 for prev, cur in history.diffs: 149 for prev, cur in history.diffs:
146 if cur and cur.was_any_sub_baked: 150 if not cur:
147 continue 151 continue
148 if prev and any(map( 152 if cur.was_any_sub_baked:
149 lambda usn: usn in dirty_source_names, 153 continue
150 prev.getAllUsedSourceNames())): 154 if prev:
151 jobs.append(create_job(self, prev.item_spec, 155 if any(map(
152 pass_num=pass_num, 156 lambda usn: usn in dirty_source_names,
153 force_bake=True)) 157 prev.getAllUsedSourceNames()[0])):
158 jobs.append(create_job(self, prev.item_spec,
159 pass_num=pass_num,
160 force_bake=True))
161 else:
162 # This page uses other sources, but no source was dirty
163 # this time around (it was a null build, maybe). We
164 # don't have any work to do, but we need to carry over
165 # any information we have, otherwise the post bake step
166 # will think we need to delete last bake's outputs.
167 cur.subs = copy.deepcopy(prev.subs)
168
154 if len(jobs) > 0: 169 if len(jobs) > 0:
155 return jobs 170 return jobs
156 return None 171 return None
157 172
158 def handleJobResult(self, result, ctx): 173 def handleJobResult(self, result, ctx):