comparison piecrust/processing/base.py @ 117:6827dcc9d3fb

Changes to the asset processing pipeline:

* Add semi-functional RequireJS processor.
* Processors now match on the relative path.
* Support for processors that add more processors of their own.
* A couple of related fixes.
author Ludovic Chabant <ludovic@chabant.com>
date Tue, 28 Oct 2014 08:20:38 -0700
parents 45828c4167ad
children 133845647083
comparing 116:1c13f3389fcb with 117:6827dcc9d3fb
@@ -34,10 +34,10 @@
         pass
 
     def onPipelineEnd(self, pipeline):
         pass
 
-    def matches(self, filename):
+    def matches(self, path):
         return False
 
     def getDependencies(self, path):
         return None
@@ -54,11 +54,11 @@
 
     def __init__(self):
         super(CopyFileProcessor, self).__init__()
         self.priority = PRIORITY_LAST
 
-    def matches(self, filename):
+    def matches(self, path):
         return True
 
     def getOutputFilenames(self, filename):
         return [filename]
 
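For context, `PRIORITY_LAST` combined with an always-true `matches()` is what makes CopyFileProcessor the fallback: the pipeline keeps its processors sorted by `priority` (see the `sort` call further down) and presumably hands each path to the first processor that claims it. A rough sketch of that kind of dispatch, with a hypothetical `pick_processor` helper that is not part of this file:

def pick_processor(processors, rel_path):
    # Hypothetical helper: with processors sorted by ascending priority,
    # CopyFileProcessor (PRIORITY_LAST, matches everything) is only
    # reached when no more specific processor claimed the path first.
    for proc in sorted(processors, key=lambda p: p.priority):
        if proc.matches(rel_path):
            return proc
    return None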
@@ -72,13 +72,13 @@
 class SimpleFileProcessor(Processor):
     def __init__(self, extensions=None):
         super(SimpleFileProcessor, self).__init__()
         self.extensions = extensions or {}
 
-    def matches(self, filename):
+    def matches(self, path):
         for ext in self.extensions:
-            if filename.endswith('.' + ext):
+            if path.endswith('.' + ext):
                 return True
         return False
 
     def getOutputFilenames(self, filename):
         basename, ext = os.path.splitext(filename)
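Based only on what is visible here, SimpleFileProcessor is driven by a mapping of input extensions: `matches()` tests the path against the mapping's keys. A minimal sketch of a subclass, where the Sass example and its extension mapping are illustrative guesses and the conversion hook is omitted because it is not shown in this hunk:

from piecrust.processing.base import SimpleFileProcessor


class SassProcessor(SimpleFileProcessor):
    # Hypothetical subclass: map the 'scss' input extension to a 'css'
    # output extension; matches() above only looks at the keys.
    def __init__(self):
        super(SassProcessor, self).__init__({'scss': 'css'})


p = SassProcessor()
assert p.matches('css/theme.scss')
assert not p.matches('images/logo.png')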
@@ -167,27 +167,30 @@
         self.processors = list(filter(
                 lambda p: p.PROCESSOR_NAME in authorized_names,
                 self.processors))
 
     def run(self, src_dir_or_file=None):
-        record = ProcessorPipelineRecord()
+        # Invoke pre-processors.
+        for proc in self.processors:
+            proc.onPipelineStart(self)
+
+        # Sort our processors again in case the pre-process step involved
+        # patching the processors with some new ones.
+        self.processors.sort(key=lambda p: p.priority)
 
         # Create the workers.
         pool = []
         queue = Queue()
         abort = threading.Event()
         pipeline_lock = threading.Lock()
+        record = ProcessorPipelineRecord()
         for i in range(self.num_workers):
             ctx = ProcessingWorkerContext(self, record, queue, abort,
                                           pipeline_lock)
             worker = ProcessingWorker(i, ctx)
             worker.start()
             pool.append(worker)
-
-        # Invoke pre-processors.
-        for proc in self.processors:
-            proc.onPipelineStart(self)
 
         if src_dir_or_file is not None:
             # Process only the given path.
             # Find out what mount point this is in.
             for path in self.mounts:
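The reshuffling above is what makes "processors that add more processors of their own" work: `onPipelineStart()` now runs before the workers are created, and the processor list is re-sorted immediately afterwards, so anything a pre-processor appends to `pipeline.processors` is picked up in priority order. A rough sketch of such a processor; both class names are invented for illustration, only the `onPipelineStart`/re-sort mechanics come from this changeset, and the real RequireJS processor is not shown here:

from piecrust.processing.base import Processor


class GeneratedBundleProcessor(Processor):
    # Hypothetical processor registered at pipeline start; it relies on
    # the base class providing a default priority, as the sort in run()
    # suggests it does.
    def matches(self, path):
        return path.endswith('.bundle')


class BundlerProcessor(Processor):
    # Hypothetical "processor that adds more processors of its own".
    def onPipelineStart(self, pipeline):
        # pipeline.processors is the same list that run() re-sorts by
        # priority right after this hook, so the new processor is in
        # place before any worker thread starts.
        pipeline.processors.append(GeneratedBundleProcessor())

    def matches(self, path):
        return path.endswith('.bundle.json')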