piecrust2: comparison of piecrust/baking/single.py @ 415:0e9a94b7fdfa
bake: Improve bake record information.
* Store things in the bake record that require less interaction between the
master process and the workers. For instance, don't store the paginator
object in the render pass info -- instead, just store whether pagination
    was used, and whether it had more items (a rough sketch of this data
    follows below).
* Simplify information passing between workers and bake passes by saving the
rendering info to the JSON cache. This means the "render first sub" job
doesn't have to return anything except errors now.
* Add more performance counter info.
author    Ludovic Chabant <ludovic@chabant.com>
date      Sat, 20 Jun 2015 19:23:16 -0700
parents   e7b865f8f335
children  21e26ed867b6
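
The new code in this changeset leans on per-sub-page render info being plain
data (used source names, taxonomy terms, pagination and asset flags) plus two
helpers, copyRenderInfo() and anyPass(), whose implementations are not part of
this file. The following is only a rough sketch of what that record and those
helpers might look like, inferred from how the diff below uses them; the flag
values, the used_pagination/used_assets fields, and the JSON cache helper at
the end are assumptions, not piecrust's actual code.

    # Rough sketch only -- not the actual piecrust.baking.records code.
    # Shapes are inferred from how the diff below uses them; anything not
    # visible in the diff (flag values, extra fields, cache layout) is a guess.
    import copy
    import json


    class RenderPassInfo:
        """Plain, JSON-friendly data recorded for one render pass, instead of
        keeping the paginator or render context object in the bake record."""
        def __init__(self):
            self.used_source_names = set()
            self.used_taxonomy_terms = set()
            self.used_assets = False          # guess: flag for page assets
            self.used_pagination = False      # "whether pagination was used"
            self.pagination_has_more = False  # "whether it had more items"


    class SubPageBakeInfo:
        FLAG_NONE = 0
        FLAG_BAKED = 2 ** 0                   # guesses at the actual bit values
        FLAG_FORMATTING_INVALIDATED = 2 ** 1

        def __init__(self, out_uri, out_path):
            self.out_uri = out_uri
            self.out_path = out_path
            self.flags = self.FLAG_NONE
            self.render_info = None           # dict: render pass -> RenderPassInfo

        def copyRenderInfo(self):
            # Deep vs. shallow copy is an implementation detail the diff
            # doesn't show.
            return copy.deepcopy(self.render_info)

        def anyPass(self, func):
            # True if `func` returns True for any render pass of this sub-page.
            assert self.render_info is not None
            return any(func(pinfo) for pinfo in self.render_info.values())


    def save_render_info_to_json_cache(cache_path, render_info):
        # Hypothetical helper illustrating why plain data matters: it can be
        # dumped straight into the JSON cache, so the "render first sub" job
        # only has to send errors back to the master process.
        serializable = {
            str(p): {'used_source_names': sorted(pinfo.used_source_names),
                     'used_taxonomy_terms': sorted(pinfo.used_taxonomy_terms,
                                                   key=str),
                     'used_pagination': pinfo.used_pagination,
                     'pagination_has_more': pinfo.pagination_has_more}
            for p, pinfo in render_info.items()}
        with open(cache_path, 'w', encoding='utf8') as fp:
            json.dump(serializable, fp)

With data shaped like this, the worker that renders the first sub-page can
persist its render info straight to the JSON cache and only report errors back
to the master process, which is why bake() in the diff below no longer takes a
first_render_info argument.
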
--- 414:c4b3a7fd2f87
+++ 415:0e9a94b7fdfa
@@ -1,12 +1,12 @@
 import os.path
 import shutil
 import codecs
 import logging
 import urllib.parse
-from piecrust.baking.records import (
-        PageBakeInfo, SubPageBakeInfo, BakePassInfo)
+from piecrust import ASSET_DIR_SUFFIX
+from piecrust.baking.records import SubPageBakeInfo
 from piecrust.rendering import (
         QualifiedPage, PageRenderingContext, render_page,
         PASS_FORMATTING)
 from piecrust.uriutil import split_uri
 
@@ -49,29 +49,29 @@
         bake_path.append(decoded_uri)
 
         return os.path.normpath(os.path.join(*bake_path))
 
     def bake(self, factory, route, route_metadata, prev_entry,
-             first_render_info, dirty_source_names, tax_info=None):
+             dirty_source_names, tax_info=None):
         # Get the page.
         page = factory.buildPage()
 
         # Start baking the sub-pages.
         cur_sub = 1
         has_more_subs = True
-        report = PageBakeInfo()
+        sub_entries = []
 
         while has_more_subs:
             # Get the URL and path for this sub-page.
             sub_uri = route.getUri(route_metadata, sub_num=cur_sub,
                                    provider=page)
             logger.debug("Baking '%s' [%d]..." % (sub_uri, cur_sub))
             out_path = self.getOutputPath(sub_uri)
 
             # Create the sub-entry for the bake record.
             sub_entry = SubPageBakeInfo(sub_uri, out_path)
-            report.subs.append(sub_entry)
+            sub_entries.append(sub_entry)
 
             # Find a corresponding sub-entry in the previous bake record.
             prev_sub_entry = None
             if prev_entry:
                 try:
@@ -97,11 +97,11 @@
                     pass
 
             # If this page didn't bake because it's already up-to-date.
             # Keep trying for as many subs as we know this page has.
             if not do_bake:
-                prev_sub_entry.collapseRenderPasses(sub_entry)
+                sub_entry.render_info = prev_sub_entry.copyRenderInfo()
                 sub_entry.flags = SubPageBakeInfo.FLAG_NONE
 
                 if prev_entry.num_subs >= cur_sub + 1:
                     cur_sub += 1
                     has_more_subs = True
@@ -121,39 +121,23 @@
                     sub_entry.flags |= \
                             SubPageBakeInfo.FLAG_FORMATTING_INVALIDATED
 
                 logger.debug(" p%d -> %s" % (cur_sub, out_path))
                 qp = QualifiedPage(page, route, route_metadata)
-                ctx, rp = self._bakeSingle(qp, cur_sub, out_path, tax_info)
+                rp = self._bakeSingle(qp, cur_sub, out_path, tax_info)
             except Exception as ex:
-                if self.app.debug:
-                    logger.exception(ex)
                 page_rel_path = os.path.relpath(page.path, self.app.root_dir)
                 raise BakingError("%s: error baking '%s'." %
                                   (page_rel_path, sub_uri)) from ex
 
             # Record what we did.
             sub_entry.flags |= SubPageBakeInfo.FLAG_BAKED
-            # self.record.dirty_source_names.add(record_entry.source_name)
-            for p, pinfo in ctx.render_passes.items():
-                bpi = BakePassInfo()
-                bpi.used_source_names = set(pinfo.used_source_names)
-                bpi.used_taxonomy_terms = set(pinfo.used_taxonomy_terms)
-                sub_entry.render_passes[p] = bpi
-            if prev_sub_entry:
-                prev_sub_entry.collapseRenderPasses(sub_entry)
-
-            # If this page has had its first sub-page rendered already, we
-            # have that information from the baker. Otherwise (e.g. for
-            # taxonomy pages), we have that information from the result
-            # of the render.
-            info = ctx
-            if cur_sub == 1 and first_render_info is not None:
-                info = first_render_info
+            sub_entry.render_info = rp.copyRenderInfo()
 
             # Copy page assets.
-            if cur_sub == 1 and self.copy_assets and info.used_assets:
+            if (cur_sub == 1 and self.copy_assets and
+                    sub_entry.anyPass(lambda p: p.used_assets)):
                 if self.pretty_urls:
                     out_assets_dir = os.path.dirname(out_path)
                 else:
                     out_assets_dir, out_name = os.path.split(out_path)
                     if sub_uri != self.site_root:
@@ -161,25 +145,27 @@
                         out_assets_dir += out_name_noext
 
                 logger.debug("Copying page assets to: %s" % out_assets_dir)
                 _ensure_dir_exists(out_assets_dir)
 
-                used_assets = info.used_assets
-                for ap in used_assets:
-                    dest_ap = os.path.join(out_assets_dir,
-                                           os.path.basename(ap))
-                    logger.debug(" %s -> %s" % (ap, dest_ap))
-                    shutil.copy(ap, dest_ap)
-                    report.assets.append(ap)
+                page_dirname = os.path.dirname(page.path)
+                page_pathname, _ = os.path.splitext(page.path)
+                in_assets_dir = page_pathname + ASSET_DIR_SUFFIX
+                for fn in os.listdir(in_assets_dir):
+                    full_fn = os.path.join(page_dirname, fn)
+                    if os.path.isfile(full_fn):
+                        dest_ap = os.path.join(out_assets_dir, fn)
+                        logger.debug(" %s -> %s" % (full_fn, dest_ap))
+                        shutil.copy(full_fn, dest_ap)
 
             # Figure out if we have more work.
             has_more_subs = False
-            if info.pagination_has_more:
+            if sub_entry.anyPass(lambda p: p.pagination_has_more):
                 cur_sub += 1
                 has_more_subs = True
 
-        return report
+        return sub_entries
 
     def _bakeSingle(self, qualified_page, num, out_path, tax_info=None):
         ctx = PageRenderingContext(qualified_page, page_num=num)
         if tax_info:
             tax = self.app.getTaxonomy(tax_info.taxonomy_name)
@@ -191,11 +177,11 @@
         _ensure_dir_exists(out_dir)
 
         with codecs.open(out_path, 'w', 'utf8') as fp:
             fp.write(rp.content)
 
-        return ctx, rp
+        return rp
 
 
 def _compute_force_flags(prev_sub_entry, sub_entry, dirty_source_names):
     # Figure out what to do with this page.
     force_this_sub = False
@@ -244,15 +230,15 @@
         force_this_sub = True
 
     return force_this_sub, invalidate_formatting
 
 
-def _get_dirty_source_names_and_render_passes(
-        sub_entry, dirty_source_names):
+def _get_dirty_source_names_and_render_passes(sub_entry, dirty_source_names):
     dirty_for_this = set()
     invalidated_render_passes = set()
-    for p, pinfo in sub_entry.render_passes.items():
+    assert sub_entry.render_info is not None
+    for p, pinfo in sub_entry.render_info.items():
         for src_name in pinfo.used_source_names:
             is_dirty = (src_name in dirty_source_names)
             if is_dirty:
                 invalidated_render_passes.add(p)
                 dirty_for_this.add(src_name)