piecrust2: comparison of piecrust/pipelines/asset.py @ 989:8adc27285d93
bake: Big pass on bake performance.

- Reduce the amount of data passed between processes.
- Make inter-process data simple objects to make it easier to test with alternatives to pickle.
- Make sources have the basic requirement to be able to find a content item from an item spec (path).
- Make Hoedown the default Markdown formatter.
author: Ludovic Chabant <ludovic@chabant.com>
date: Sun, 19 Nov 2017 14:29:17 -0800
parents: 45ad976712ec
children: 1a7f3ae09c53
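
The second bullet of the commit message (making inter-process data "simple objects") is what the diff below implements for the asset pipeline: the per-job result becomes a plain dict that the worker fills in ('item_spec', 'flags', 'proc_tree', 'out_paths') and that the parent process folds back into the bake record in the new handleJobResult() method. The two helpers imported in the diff, add_asset_job_result() and merge_job_result_into_record_entry(), live in piecrust.pipelines._procrecords; their bodies are not shown on this page, so the following is only a rough sketch of the pattern inferred from their call sites, not the actual piecrust code:

```python
# Rough sketch only: the real helpers live in piecrust.pipelines._procrecords
# and their bodies are not shown in this changeset, so everything below is an
# assumption inferred from how run() and handleJobResult() use them.

def add_asset_job_result(result):
    # Seed the plain-dict job result with the fields the worker fills in.
    result.update({
        'item_spec': None,
        'flags': 0,          # AssetPipelineRecordEntry.FLAG_* bit field
        'proc_tree': None,
        'out_paths': [],
    })


def merge_job_result_into_record_entry(entry, result):
    # Back in the parent process, copy the dict fields onto the record entry.
    entry.flags |= result['flags']
    entry.proc_tree = result['proc_tree']
    entry.out_paths = result['out_paths']
```

Because the payload that crosses the process boundary is just a few plain values, the bake can serialize jobs with something simpler than pickle, which is what the commit message is getting at.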
```diff
--- a/piecrust/pipelines/asset.py    988:f83ae0a5d793
+++ b/piecrust/pipelines/asset.py    989:8adc27285d93
@@ -1,10 +1,12 @@
 import os
 import os.path
 import re
 import logging
-from piecrust.pipelines._procrecords import AssetPipelineRecordEntry
+from piecrust.pipelines._procrecords import (
+    AssetPipelineRecordEntry,
+    add_asset_job_result, merge_job_result_into_record_entry)
 from piecrust.pipelines._proctree import (
     ProcessingTreeBuilder, ProcessingTreeRunner,
     get_node_name_tree, print_node,
     STATE_DIRTY)
 from piecrust.pipelines.base import ContentPipeline
@@ -62,44 +64,52 @@
         stats = self.app.env.stats
         stats.registerTimer('BuildProcessingTree', raise_if_registered=False)
         stats.registerTimer('RunProcessingTree', raise_if_registered=False)
 
     def run(self, job, ctx, result):
+        # Create the result stuff.
+        item_spec = job['job_spec'][1]
+        add_asset_job_result(result)
+        result['item_spec'] = item_spec
+
         # See if we need to ignore this item.
-        rel_path = os.path.relpath(job.content_item.spec, self._base_dir)
+        rel_path = os.path.relpath(item_spec, self._base_dir)
         if re_matchany(rel_path, self._ignore_patterns):
             return
 
-        record_entry = result.record_entry
+        # Build the processing tree for this job.
         stats = self.app.env.stats
-        out_dir = self.ctx.out_dir
-
-        # Build the processing tree for this job.
         with stats.timerScope('BuildProcessingTree'):
             builder = ProcessingTreeBuilder(self._processors)
             tree_root = builder.build(rel_path)
-        record_entry.flags |= AssetPipelineRecordEntry.FLAG_PREPARED
+        result['flags'] |= AssetPipelineRecordEntry.FLAG_PREPARED
 
         # Prepare and run the tree.
+        out_dir = self.ctx.out_dir
         print_node(tree_root, recursive=True)
         leaves = tree_root.getLeaves()
-        record_entry.out_paths = [os.path.join(out_dir, l.path)
-                                  for l in leaves]
-        record_entry.proc_tree = get_node_name_tree(tree_root)
+        result['out_paths'] = [os.path.join(out_dir, l.path)
+                               for l in leaves]
+        result['proc_tree'] = get_node_name_tree(tree_root)
         if tree_root.getProcessor().is_bypassing_structured_processing:
-            record_entry.flags |= (
+            result['flags'] |= (
                 AssetPipelineRecordEntry.FLAG_BYPASSED_STRUCTURED_PROCESSING)
 
         if self.ctx.force:
             tree_root.setState(STATE_DIRTY, True)
 
         with stats.timerScope('RunProcessingTree'):
             runner = ProcessingTreeRunner(
                 self._base_dir, self.tmp_dir, out_dir)
             if runner.processSubTree(tree_root):
-                record_entry.flags |= (
+                result['flags'] |= (
                     AssetPipelineRecordEntry.FLAG_PROCESSED)
 
+    def handleJobResult(self, result, ctx):
+        entry = self.createRecordEntry(result['item_spec'])
+        merge_job_result_into_record_entry(entry, result)
+        ctx.record.addEntry(entry)
+
     def getDeletions(self, ctx):
         for prev, cur in ctx.record_history.diffs:
             if prev and not cur:
                 for p in prev.out_paths:
```