comparison piecrust/pipelines/asset.py @ 979:45ad976712ec

tests: Big push to get the tests to pass again.

- Lots of fixes everywhere in the code.
- Try to handle debug logging in the multiprocessing worker pool when
  running in pytest. Not perfect, but usable for now.
- Replace all `.md` test files with `.html` since an auto-format
  extension now always sets the format.
- Replace `out` with `outfiles` in most places since blog archives are
  now added to the bake output and I don't want to add expected outputs
  for blog archives everywhere.
author Ludovic Chabant <ludovic@chabant.com>
date Sun, 29 Oct 2017 22:51:57 -0700
parents 08e02c2a2a1a
children 8adc27285d93
--- a/piecrust/pipelines/asset.py  (978:7e51d14097cb)
+++ b/piecrust/pipelines/asset.py  (979:45ad976712ec)
@@ -23,23 +23,22 @@
         if not isinstance(source, FSContentSourceBase):
             raise Exception(
                 "The asset pipeline only support file-system sources.")
 
         super().__init__(source, ppctx)
-        self.enabled_processors = None
-        self.ignore_patterns = []
+        self._ignore_patterns = []
         self._processors = None
         self._base_dir = source.fs_endpoint_path
 
     def initialize(self):
         # Get the list of processors for this run.
         processors = self.app.plugin_loader.getProcessors()
-        if self.enabled_processors is not None:
-            logger.debug("Filtering processors to: %s" %
-                         self.enabled_processors)
+        enabled_processors = self.app.config.get('pipelines/asset/processors')
+        if enabled_processors is not None:
+            logger.debug("Filtering processors to: %s" % enabled_processors)
             processors = get_filtered_processors(processors,
-                                                  self.enabled_processors)
+                                                 enabled_processors)
 
         # Invoke pre-processors.
         proc_ctx = ProcessorContext(self)
         for proc in processors:
             proc.onPipelineStart(proc_ctx)
@@ -53,21 +52,23 @@
         # Ok, that's the list of processors for this run.
         self._processors = processors
 
         # Pre-processors can define additional ignore patterns so let's
         # add them to what we had already.
-        self.ignore_patterns += make_re(proc_ctx.ignore_patterns)
+        ignores = self.app.config.get('pipelines/asset/ignore', [])
+        ignores += proc_ctx.ignore_patterns
+        self._ignore_patterns += make_re(ignores)
 
         # Register timers.
         stats = self.app.env.stats
         stats.registerTimer('BuildProcessingTree', raise_if_registered=False)
         stats.registerTimer('RunProcessingTree', raise_if_registered=False)
 
     def run(self, job, ctx, result):
         # See if we need to ignore this item.
         rel_path = os.path.relpath(job.content_item.spec, self._base_dir)
-        if re_matchany(rel_path, self.ignore_patterns):
+        if re_matchany(rel_path, self._ignore_patterns):
             return
 
         record_entry = result.record_entry
         stats = self.app.env.stats
         out_dir = self.ctx.out_dir
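For readers skimming this change: the asset pipeline now takes its list of enabled processors from the `pipelines/asset/processors` setting and its ignore patterns from `pipelines/asset/ignore`, merges the latter with whatever patterns the pre-processors register, and compiles the result once so `run()` can cheaply skip ignored items. The sketch below only illustrates that flow; `glob_to_regex` and `matches_any` are hypothetical stand-ins for PieCrust's `make_re` and `re_matchany` helpers (assumed here to do glob-style matching), and the config values are made up.

    import os
    import re
    import fnmatch

    # Hypothetical stand-ins for make_re / re_matchany; the real PieCrust
    # helpers may behave differently.
    def glob_to_regex(patterns):
        return [re.compile(fnmatch.translate(p)) for p in patterns]

    def matches_any(rel_path, compiled_patterns):
        return any(r.match(rel_path) for r in compiled_patterns)

    # Made-up stand-in for the app config queried with
    # self.app.config.get('pipelines/asset/...') in the diff above.
    site_config = {
        'pipelines/asset/processors': ['copy', 'sass'],
        'pipelines/asset/ignore': ['*.psd', '_drafts/*'],
    }

    # initialize(): merge configured ignores with pre-processor ignores
    # and compile them once.
    ignores = list(site_config.get('pipelines/asset/ignore', []))
    ignores += ['Thumbs.db']                  # e.g. added by a pre-processor
    ignore_patterns = glob_to_regex(ignores)

    # run(): skip any job whose relative path matches a compiled pattern.
    rel_path = os.path.relpath('/site/assets/_drafts/logo.psd', '/site/assets')
    if matches_any(rel_path, ignore_patterns):
        print("ignored:", rel_path)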