comparison piecrust/pipelines/page.py @ 1132:3bcb2d446397
fix: Correctly invalidate pages that use dirtied sources.
author    Ludovic Chabant <ludovic@chabant.com>
date      Mon, 16 Apr 2018 22:22:54 -0700
parents   d85de09f40c7
children  5f97b5b59dfe
comparing 1131:32f71dbf5cb1 to 1132:3bcb2d446397
@@ -35,10 +35,11 @@
         self._pagebaker.startWriterQueue()

     def createJobs(self, ctx):
         pass_num = ctx.pass_num
         if pass_num == 0:
+            ctx.current_record.user_data['dirty_source_names'] = set()
             return self._createLoadJobs(ctx)
         if pass_num == 1:
             return self._createSecondPassJobs(ctx)
         if pass_num == 2:
             return self._createThirdPassJobs(ctx)
@@ -66,12 +67,10 @@
             ctx.record_histories.current.records)
         history = ctx.record_histories.getHistory(ctx.record_name).copy()
         history.build()

         pass_num = ctx.pass_num
-        record = ctx.current_record
-        record.user_data['dirty_source_names'] = set()

         for prev, cur in history.diffs:
             # Ignore pages that disappeared since last bake.
             if cur is None:
                 continue
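Note on the two hunks above: the `dirty_source_names` set moves from the second-pass job creation up to pass 0, so it already exists when pass-0 job results start flagging sources as modified. A rough, self-contained sketch of that pattern (the `Record` class and the 'posts' source name are invented; only the `user_data` idea comes from the diff):

class Record:
    def __init__(self):
        # Free-form, per-bake storage shared by all passes of the pipeline.
        self.user_data = {}

record = Record()

# Pass 0, job creation: seed the set before any result comes back.
record.user_data['dirty_source_names'] = set()

# Pass 0, result handling: a page whose file changed taints its source.
record.user_data['dirty_source_names'].add('posts')

# Pass 2, job creation: read whatever has accumulated so far.
print(record.user_data['dirty_source_names'])   # {'posts'}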
@@ -149,16 +148,20 @@
             if not cur:
                 continue
             if cur.was_any_sub_baked:
                 continue
             if prev:
-                if any(map(
-                        lambda usn: usn in dirty_source_names,
-                        prev.getAllUsedSourceNames()[0])):
-                    jobs.append(create_job(self, prev.item_spec,
-                                           pass_num=pass_num,
-                                           force_bake=True))
+                usn1, usn2 = prev.getAllUsedSourceNames()
+                force_segments = any(map(lambda u: u in dirty_source_names,
+                                         usn1))
+                force_layout = any(map(lambda u: u in dirty_source_names,
+                                       usn2))
+                if force_segments or force_layout:
+                    jobs.append(create_job(self, prev.item_spec,
+                                           pass_num=pass_num,
+                                           force_segments=force_segments,
+                                           force_layout=force_layout))
                 else:
                     # This page uses other sources, but no source was dirty
                     # this time around (it was a null build, maybe). We
                     # don't have any work to do, but we need to carry over
                     # any information we have, otherwise the post bake step
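The core of the fix is in the hunk above: `getAllUsedSourceNames()` returns two collections, one for sources referenced by the page's content segments and one for sources referenced by its layout (per the new variable names), and each now drives its own force flag instead of a single blanket `force_bake`. A minimal illustration of that membership test, with invented data:

dirty_source_names = {'posts'}

# What a previous bake record entry might report for one page.
used_by_segments = ['assets']
used_by_layout = ['posts', 'tags']

force_segments = any(u in dirty_source_names for u in used_by_segments)
force_layout = any(u in dirty_source_names for u in used_by_layout)

print(force_segments, force_layout)   # False True: only the layout render is forced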
@@ -180,17 +183,21 @@
             new_entry.flags = result['flags']
             new_entry.config = result['config']
             new_entry.route_params = result['route_params']
             new_entry.timestamp = result['timestamp']
             ctx.record.addEntry(new_entry)
+
+            # If this page was modified, flag its entire source as "dirty",
+            # so any pages using that source can be re-baked.
+            if (new_entry.flags & PagePipelineRecordEntry.FLAG_SOURCE_MODIFIED):
+                ctx.record.user_data['dirty_source_names'].add(
+                    self.source.name)
         else:
             # Update the entry with the new information.
             existing = ctx.record_entry
             if not result.get('postponed', False):
                 merge_job_result_into_record_entry(existing, result)
-            if existing.was_any_sub_baked:
-                ctx.record.user_data['dirty_source_names'].add(self.source.name)

     def run(self, job, ctx, result):
         pass_num = job.get('pass_num', 0)
         step_num = job.get('step_num', 0)

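A source is now marked dirty from the pass-0 result's `FLAG_SOURCE_MODIFIED` bit rather than from the old `was_any_sub_baked` check on an updated entry. A small sketch of that bit-flag test; the flag value here is made up, the real constant is defined on `PagePipelineRecordEntry`:

FLAG_SOURCE_MODIFIED = 2 ** 0   # hypothetical bit value

def flag_dirty_source(entry_flags, source_name, dirty_source_names):
    # A page whose backing file changed taints its whole source, so pages
    # elsewhere that use this source get re-baked in a later pass.
    if entry_flags & FLAG_SOURCE_MODIFIED:
        dirty_source_names.add(source_name)

dirty = set()
flag_dirty_source(FLAG_SOURCE_MODIFIED, 'posts', dirty)
flag_dirty_source(0, 'pages', dirty)
print(dirty)   # {'posts'}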
@@ -282,12 +289,14 @@
     def _renderAlways(self, job, ctx, result):
         content_item = content_item_from_job(self, job)
         logger.debug("Full render for: %s" % content_item.spec)
         page = self.app.getPage(self.source, content_item)
         prev_entry = ctx.previous_entry
-        rdr_subs = self._pagebaker.bake(page, prev_entry,
-                                        force=job.get('force_bake'))
+        rdr_subs = self._pagebaker.bake(
+            page, prev_entry,
+            force_segments=job.get('force_segments'),
+            force_layout=job.get('force_layout'))

         add_page_job_result(result)
         result['subs'] = rdr_subs

 def _get_used_paths_from_records(records):
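Finally, the two flags ride along in the job dict and are read back with `.get()`, so jobs created without them simply render un-forced. A toy sketch of that round trip; the `create_job` and `bake` below are stand-ins, not piecrust's real helpers:

def create_job(item_spec, pass_num, **kwargs):
    job = {'job_spec': item_spec, 'pass_num': pass_num}
    job.update(kwargs)
    return job

def bake(job):
    # .get() returns None for jobs that never set the flags.
    return job.get('force_segments'), job.get('force_layout')

forced = create_job('pages/about.md', pass_num=2,
                    force_segments=False, force_layout=True)
plain = create_job('pages/about.md', pass_num=2)

print(bake(forced))   # (False, True)
print(bake(plain))    # (None, None)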