comparison piecrust/sources/taxonomy.py @ 882:acd9c3e8533f

bake: Correctly setup unbaked entries for taxonomy pages.
author Ludovic Chabant <ludovic@chabant.com>
date Sat, 17 Jun 2017 09:14:36 -0700
parents d6d35b2efd04
children 85d2b386b971
--- piecrust/sources/taxonomy.py  881:b4e10471e970
+++ piecrust/sources/taxonomy.py  882:acd9c3e8533f
@@ -1,6 +1,7 @@
 import re
+import copy
 import logging
 import unidecode
 from piecrust.configuration import ConfigurationError
 from piecrust.data.filters import (
     PaginationFilter, SettingFilterClause)
@@ -307,21 +308,23 @@
         # We create bake entries for all the terms that were *not* dirty.
         # This is because otherwise, on the next incremental bake, we wouldn't
         # find any entry for those things, and figure that we need to delete
         # their outputs.
         analyzer = self._analyzer
+        record = ctx.record_history.current
         for prev, cur in ctx.record_history.diffs:
             # Only consider entries that don't have any current version
             # (i.e. they weren't baked just now).
             if prev and not cur:
                 t = prev.term
                 if analyzer.isKnownSlugifiedTerm(t):
                     logger.debug("Creating unbaked entry for '%s' term: %s" %
                                  (self.taxonomy.name, t))
-                    cur.term = t
-                    cur.out_paths = list(prev.out_paths)
-                    cur.errors = list(prev.errors)
+                    cur = copy.deepcopy(prev)
+                    cur.flags = \
+                        PagePipelineRecordEntry.FLAG_COLLAPSED_FROM_LAST_RUN
+                    record.addEntry(cur)
                 else:
                     logger.debug("Term '%s' in '%s' isn't used anymore." %
                                  (t, self.taxonomy.name))
 
 
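Note on the hunk above: instead of copying a few fields onto the existing `cur` entry, the pipeline now deep-copies the previous run's entry, flags it as collapsed from the last run, and adds it back to the current record, so the next incremental bake still finds an entry for terms that simply weren't re-baked. The sketch below is illustrative only; the stub class, flag value, and helper function are assumptions, not PieCrust's real PagePipelineRecordEntry / record-history API.

    import copy

    # Illustrative flag value; the real PagePipelineRecordEntry constant differs.
    FLAG_COLLAPSED_FROM_LAST_RUN = 2**4

    class StubRecordEntry:
        # Minimal stand-in for a taxonomy page record entry.
        def __init__(self, term, out_paths):
            self.term = term
            self.out_paths = out_paths
            self.flags = 0

    def collapse_unbaked_terms(diffs, known_terms, current_record):
        # For terms that still exist but weren't re-baked this run, carry the
        # previous entry forward so the next incremental bake doesn't conclude
        # that their outputs were deleted.
        for prev, cur in diffs:
            if prev and not cur and prev.term in known_terms:
                carried = copy.deepcopy(prev)
                carried.flags |= FLAG_COLLAPSED_FROM_LAST_RUN
                current_record.append(carried)

    # Usage: 'recipes' is still a known term but was untouched this run;
    # 'drafts' is no longer used, so no entry is carried over for it.
    prev_recipes = StubRecordEntry('recipes', ['tags/recipes/index.html'])
    prev_drafts = StubRecordEntry('drafts', ['tags/drafts/index.html'])
    current = []
    collapse_unbaked_terms([(prev_recipes, None), (prev_drafts, None)],
                           known_terms={'recipes'},
                           current_record=current)
    assert [e.term for e in current] == ['recipes']
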
@@ -379,19 +382,20 @@
             entries = [cur_entry]
             if prev_entry:
                 entries.append(prev_entry)
 
             for e in entries:
-                entry_terms = e.config.get(taxonomy.setting_name)
-                if entry_terms:
-                    if not taxonomy.is_multiple:
-                        self._single_dirty_slugified_terms.add(
-                            slugifier.slugify(entry_terms))
-                    else:
-                        self._single_dirty_slugified_terms.update(
-                            (slugifier.slugify(t)
-                             for t in entry_terms))
+                if e.was_any_sub_baked:
+                    entry_terms = e.config.get(taxonomy.setting_name)
+                    if entry_terms:
+                        if not taxonomy.is_multiple:
+                            self._single_dirty_slugified_terms.add(
+                                slugifier.slugify(entry_terms))
+                        else:
+                            self._single_dirty_slugified_terms.update(
+                                (slugifier.slugify(t)
+                                 for t in entry_terms))
 
         self._all_dirty_slugified_terms = list(
             self._single_dirty_slugified_terms)
         logger.debug("Gathered %d dirty taxonomy terms",
                      len(self._all_dirty_slugified_terms))
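Note on the hunk above: the new `was_any_sub_baked` guard means only entries whose outputs actually changed this run contribute "dirty" taxonomy terms. A minimal sketch of that gathering step follows; the `_slugify` helper and dict-shaped entries are simplified stand-ins (PieCrust's slugifier is configurable and built on unidecode), not the real API.

    import re
    import unicodedata

    def _slugify(term):
        # Simplified stand-in for PieCrust's configurable slugifier.
        term = unicodedata.normalize('NFKD', term)
        term = term.encode('ascii', 'ignore').decode('ascii')
        return re.sub(r'[^a-z0-9]+', '-', term.lower()).strip('-')

    def gather_dirty_terms(entries, setting_name, is_multiple):
        # Collect slugified terms, but only from entries that were re-baked
        # during this run (mirroring the new `was_any_sub_baked` check).
        dirty = set()
        for e in entries:
            if not e['was_any_sub_baked']:
                continue
            terms = e['config'].get(setting_name)
            if not terms:
                continue
            if is_multiple:
                dirty.update(_slugify(t) for t in terms)
            else:
                dirty.add(_slugify(terms))
        return dirty

    # Usage: only the first entry was re-baked, so only its tags become dirty.
    entries = [
        {'was_any_sub_baked': True,
         'config': {'tags': ['Gluten Free', 'Crème Brûlée']}},
        {'was_any_sub_baked': False,
         'config': {'tags': ['Dinner']}},
    ]
    print(gather_dirty_terms(entries, 'tags', is_multiple=True))
    # Prints {'gluten-free', 'creme-brulee'} (set order may vary).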