Mercurial > piecrust2
changeset 735:6c500fd3194f
Merge changes.
author | Ludovic Chabant <ludovic@chabant.com> |
---|---|
date | Wed, 01 Jun 2016 22:24:35 -0700 |
parents | 3f01f63b7247 (diff) e67da1f7293b (current diff) |
children | 13ec290bfc13 |
files | |
diffstat | 51 files changed, 1494 insertions(+), 965 deletions(-) [+] |
line wrap: on
line diff
--- a/piecrust/__init__.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/__init__.py Wed Jun 01 22:24:35 2016 -0700 @@ -3,6 +3,7 @@ ASSETS_DIR = 'assets' TEMPLATES_DIR = 'templates' THEME_DIR = 'theme' +THEMES_DIR = 'themes' CONFIG_PATH = 'config.yml' THEME_CONFIG_PATH = 'theme_config.yml' @@ -13,11 +14,11 @@ DEFAULT_TEMPLATE_ENGINE = 'jinja2' DEFAULT_POSTS_FS = 'flat' DEFAULT_DATE_FORMAT = '%b %d, %Y' -DEFAULT_THEME_SOURCE = 'http://bitbucket.org/ludovicchabant/' +DEFAULT_THEME_SOURCE = 'https://bitbucket.org/ludovicchabant/' -PIECRUST_URL = 'http://bolt80.com/piecrust/' +PIECRUST_URL = 'https://bolt80.com/piecrust/' -CACHE_VERSION = 23 +CACHE_VERSION = 26 try: from piecrust.__version__ import APP_VERSION
--- a/piecrust/app.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/app.py Wed Jun 01 22:24:35 2016 -0700 @@ -15,7 +15,6 @@ from piecrust.configuration import ConfigurationError, merge_dicts from piecrust.routing import Route from piecrust.sources.base import REALM_THEME -from piecrust.taxonomies import Taxonomy logger = logging.getLogger(__name__) @@ -103,11 +102,23 @@ @cached_property def theme_dir(self): + # No theme if the curent site is already a theme. if self.theme_site: return None + + # See if there's a theme we absolutely want. td = self._get_dir(THEME_DIR) if td is not None: return td + + # Try to load a theme specified in the configuration. + from piecrust.themes.base import ThemeLoader + loader = ThemeLoader(self.root_dir) + theme_dir = loader.getThemeDir() + if theme_dir is not None: + return theme_dir + + # Nothing... use the default theme. return os.path.join(RESOURCES_DIR, 'theme') @cached_property @@ -139,12 +150,20 @@ return routes @cached_property - def taxonomies(self): - taxonomies = [] - for tn, tc in self.config.get('site/taxonomies').items(): - tax = Taxonomy(self, tn, tc) - taxonomies.append(tax) - return taxonomies + def generators(self): + defs = {} + for cls in self.plugin_loader.getPageGenerators(): + defs[cls.GENERATOR_NAME] = cls + + gens = [] + for n, g in self.config.get('site/generators').items(): + cls = defs.get(g['type']) + if cls is None: + raise ConfigurationError("No such page generator type: %s" % + g['type']) + gen = cls(self, n, g) + gens.append(gen) + return gens def getSource(self, source_name): for source in self.sources: @@ -152,33 +171,30 @@ return source return None - def getRoutes(self, source_name, *, skip_taxonomies=False): + def getGenerator(self, generator_name): + for gen in self.generators: + if gen.name == generator_name: + return gen + return None + + def getSourceRoutes(self, source_name): for route in self.routes: if route.source_name == source_name: - if not skip_taxonomies or route.taxonomy_name is 
None: - yield route + yield route - def getRoute(self, source_name, route_metadata, *, skip_taxonomies=False): - for route in self.getRoutes(source_name, - skip_taxonomies=skip_taxonomies): + def getSourceRoute(self, source_name, route_metadata): + for route in self.getSourceRoutes(source_name): if (route_metadata is None or route.matchesMetadata(route_metadata)): return route return None - def getTaxonomyRoute(self, tax_name, source_name): + def getGeneratorRoute(self, generator_name): for route in self.routes: - if (route.taxonomy_name == tax_name and - route.source_name == source_name): + if route.generator_name == generator_name: return route return None - def getTaxonomy(self, tax_name): - for tax in self.taxonomies: - if tax.name == tax_name: - return tax - return None - def _get_dir(self, default_rel_dir): abs_dir = os.path.join(self.root_dir, default_rel_dir) if os.path.isdir(abs_dir):
--- a/piecrust/appconfig.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/appconfig.py Wed Jun 01 22:24:35 2016 -0700 @@ -142,6 +142,7 @@ self._values = self._validateAll(values) except Exception as ex: + logger.exception(ex) raise Exception( "Error loading configuration from: %s" % ', '.join(paths)) from ex @@ -221,6 +222,7 @@ try: val2 = callback(val, values, cache_writer) except Exception as ex: + logger.exception(ex) raise Exception("Error raised in validator '%s'." % callback_name) from ex if val2 is None: @@ -288,7 +290,9 @@ } ], 'theme_tag_page': 'theme_pages:_tag.%ext%', - 'theme_category_page': 'theme_pages:_category.%ext%' + 'theme_category_page': 'theme_pages:_category.%ext%', + 'theme_month_page': 'theme_pages:_month.%ext%', + 'theme_year_page': 'theme_pages:_year.%ext%' }) }) @@ -332,9 +336,10 @@ 'posts_fs': DEFAULT_POSTS_FS, 'default_page_layout': 'default', 'default_post_layout': 'post', - 'post_url': '%year%/%month%/%day%/%slug%', - 'tag_url': 'tag/%tag%', - 'category_url': '%category%', + 'post_url': '/%int4:year%/%int2:month%/%int2:day%/%slug%', + 'year_url': '/archives/%int4:year%', + 'tag_url': '/tag/%path:tag%', + 'category_url': '/%category%', 'posts_per_page': 5 }) }) @@ -362,25 +367,25 @@ 'func': 'pcurl(slug)' } ], - 'taxonomies': collections.OrderedDict({ - 'tags': { + 'taxonomies': collections.OrderedDict([ + ('tags', { 'multiple': True, 'term': 'tag' - }, - 'categories': { + }), + ('categories', { 'term': 'category' - } - }) + }) + ]) }) }) def get_default_content_model_for_blog( blog_name, is_only_blog, values, user_overrides, theme_site=False): - # Get the global values for various things we're interested in. + # Get the global (default) values for various things we're interested in. 
defs = {} names = ['posts_fs', 'posts_per_page', 'date_format', - 'default_post_layout', 'post_url', 'tag_url', 'category_url'] + 'default_post_layout', 'post_url', 'year_url'] for n in names: defs[n] = try_get_dict_value( user_overrides, 'site/%s' % n, @@ -389,10 +394,11 @@ # More stuff we need. if is_only_blog: url_prefix = '' - tax_page_prefix = '' + page_prefix = '' fs_endpoint = 'posts' data_endpoint = 'blog' item_name = 'post' + tpl_func_prefix = 'pc' if theme_site: # If this is a theme site, show posts from a `sample` directory @@ -401,10 +407,15 @@ fs_endpoint = 'sample/posts' else: url_prefix = blog_name + '/' - tax_page_prefix = blog_name + '/' + page_prefix = blog_name + '/' + data_endpoint = blog_name fs_endpoint = 'posts/%s' % blog_name - data_endpoint = blog_name - item_name = '%s-post' % blog_name + item_name = try_get_dict_value(user_overrides, + '%s/item_name' % blog_name, + '%spost' % blog_name) + tpl_func_prefix = try_get_dict_value(user_overrides, + '%s/func_prefix' % blog_name, + 'pc%s' % blog_name) # Figure out the settings values for this blog, specifically. 
# The value could be set on the blog config itself, globally, or left at @@ -414,28 +425,21 @@ blog_values = {} for n in names: blog_values[n] = blog_cfg.get(n, defs[n]) - if n in ['post_url', 'tag_url', 'category_url']: - blog_values[n] = url_prefix + blog_values[n] posts_fs = blog_values['posts_fs'] posts_per_page = blog_values['posts_per_page'] date_format = blog_values['date_format'] default_layout = blog_values['default_post_layout'] - post_url = '/' + blog_values['post_url'].lstrip('/') - tag_url = '/' + blog_values['tag_url'].lstrip('/') - category_url = '/' + blog_values['category_url'].lstrip('/') + post_url = '/' + url_prefix + blog_values['post_url'].lstrip('/') + year_url = '/' + url_prefix + blog_values['year_url'].lstrip('/') - tags_taxonomy = 'pages:%s_tag.%%ext%%' % tax_page_prefix - category_taxonomy = 'pages:%s_category.%%ext%%' % tax_page_prefix + year_archive = 'pages:%s_year.%%ext%%' % page_prefix if not theme_site: - theme_tag_page = values['site'].get('theme_tag_page') - if theme_tag_page: - tags_taxonomy += ';' + theme_tag_page - theme_category_page = values['site'].get('theme_category_page') - if theme_category_page: - category_taxonomy += ';' + theme_category_page + theme_year_page = values['site'].get('theme_year_page') + if theme_year_page: + year_archive += ';' + theme_year_page - return collections.OrderedDict({ + cfg = collections.OrderedDict({ 'site': collections.OrderedDict({ 'sources': collections.OrderedDict({ blog_name: collections.OrderedDict({ @@ -447,35 +451,79 @@ 'data_type': 'blog', 'items_per_page': posts_per_page, 'date_format': date_format, - 'default_layout': default_layout, - 'taxonomy_pages': collections.OrderedDict({ - 'tags': tags_taxonomy, - 'categories': category_taxonomy - }) + 'default_layout': default_layout + }) + }), + 'generators': collections.OrderedDict({ + ('%s_archives' % blog_name): collections.OrderedDict({ + 'type': 'blog_archives', + 'source': blog_name, + 'page': year_archive }) }), 'routes': [ { 
'url': post_url, 'source': blog_name, - 'func': 'pcposturl(year,month,day,slug)' + 'func': ( + '%sposturl(int:year,int:month,int:day,slug)' % + tpl_func_prefix) }, { - 'url': tag_url, - 'source': blog_name, - 'taxonomy': 'tags', - 'func': 'pctagurl(tag)' - }, - { - 'url': category_url, - 'source': blog_name, - 'taxonomy': 'categories', - 'func': 'pccaturl(category)' + 'url': year_url, + 'generator': ('%s_archives' % blog_name), + 'func': ('%syearurl(year)' % tpl_func_prefix) } ] }) }) + # Add a generator and a route for each taxonomy. + taxonomies_cfg = values.get('site', {}).get('taxonomies', {}).copy() + taxonomies_cfg.update( + user_overrides.get('site', {}).get('taxonomies', {})) + for tax_name, tax_cfg in taxonomies_cfg.items(): + term = tax_cfg.get('term', tax_name) + + # Generator. + page_ref = 'pages:%s_%s.%%ext%%' % (page_prefix, term) + if not theme_site: + theme_page_ref = values['site'].get('theme_%s_page' % term) + if theme_page_ref: + page_ref += ';' + theme_page_ref + tax_gen_name = '%s_%s' % (blog_name, tax_name) + tax_gen = collections.OrderedDict({ + 'type': 'taxonomy', + 'source': blog_name, + 'taxonomy': tax_name, + 'page': page_ref + }) + cfg['site']['generators'][tax_gen_name] = tax_gen + + # Route. + tax_url_cfg_name = '%s_url' % term + tax_url = blog_cfg.get(tax_url_cfg_name, + try_get_dict_value( + user_overrides, + 'site/%s' % tax_url_cfg_name, + values['site'].get( + tax_url_cfg_name, + '%s/%%%s%%' % (term, term)))) + tax_url = '/' + url_prefix + tax_url.lstrip('/') + term_arg = term + if tax_cfg.get('multiple') is True: + term_arg = '+' + term + tax_func = '%s%surl(%s)' % (tpl_func_prefix, term, term_arg) + tax_route = collections.OrderedDict({ + 'url': tax_url, + 'generator': tax_gen_name, + 'taxonomy': tax_name, + 'func': tax_func + }) + cfg['site']['routes'].append(tax_route) + + return cfg + # Configuration value validators. 
# @@ -490,8 +538,12 @@ taxonomies = v.get('taxonomies') if taxonomies is None: v['taxonomies'] = {} + generators = v.get('generators') + if generators is None: + v['generators'] = {} return v + # Make sure the site root starts and ends with a slash. def _validate_site_root(v, values, cache): if not v.startswith('/'): @@ -583,6 +635,14 @@ "Source '%s' is using a reserved endpoint name: %s" % (sn, endpoint)) + # Validate generators. + for gn, gc in sc.get('generators', {}).items(): + if not isinstance(gc, dict): + raise ConfigurationError( + "Generators for source '%s' should be defined in a " + "dictionary." % sn) + gc['source'] = sn + return v @@ -605,23 +665,46 @@ "have an 'url'.") if rc_url[0] != '/': raise ConfigurationError("Route URLs must start with '/'.") - if rc.get('source') is None: - raise ConfigurationError("Routes must specify a source.") - if rc['source'] not in list(values['site']['sources'].keys()): + + r_source = rc.get('source') + r_generator = rc.get('generator') + if r_source is None and r_generator is None: + raise ConfigurationError("Routes must specify a source or " + "generator.") + if (r_source and + r_source not in list(values['site']['sources'].keys())): raise ConfigurationError("Route is referencing unknown " - "source: %s" % rc['source']) - rc.setdefault('taxonomy', None) + "source: %s" % r_source) + if (r_generator and + r_generator not in list(values['site']['generators'].keys())): + raise ConfigurationError("Route is referencing unknown " + "generator: %s" % r_generator) + + rc.setdefault('generator', None) rc.setdefault('page_suffix', '/%num%') return v def _validate_site_taxonomies(v, values, cache): + if not isinstance(v, dict): + raise ConfigurationError( + "The 'site/taxonomies' setting must be a mapping.") for tn, tc in v.items(): tc.setdefault('multiple', False) tc.setdefault('term', tn) tc.setdefault('page', '_%s.%%ext%%' % tc['term']) + return v + +def _validate_site_generators(v, values, cache): + if not isinstance(v, 
dict): + raise ConfigurationError( + "The 'site/generators' setting must be a mapping.") + for gn, gc in v.items(): + if 'type' not in gc: + raise ConfigurationError( + "Generator '%s' doesn't specify a type." % gn) return v
--- a/piecrust/baking/baker.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/baking/baker.py Wed Jun 01 22:24:35 2016 -0700 @@ -3,13 +3,14 @@ import hashlib import logging from piecrust.baking.records import ( - BakeRecordEntry, TransitionalBakeRecord, TaxonomyInfo) + BakeRecordEntry, TransitionalBakeRecord) from piecrust.baking.worker import ( save_factory, JOB_LOAD, JOB_RENDER_FIRST, JOB_BAKE) from piecrust.chefutil import ( format_timed_scope, format_timed) from piecrust.environment import ExecutionStats +from piecrust.generation.base import PageGeneratorBakeContext from piecrust.routing import create_route_metadata from piecrust.sources.base import ( REALM_NAMES, REALM_USER, REALM_THEME) @@ -29,16 +30,13 @@ self.applied_config_variant = applied_config_variant self.applied_config_values = applied_config_values - # Remember what taxonomy pages we should skip - # (we'll bake them repeatedly later with each taxonomy term) - self.taxonomy_pages = [] - logger.debug("Gathering taxonomy page paths:") - for tax in self.app.taxonomies: - for src in self.app.sources: - tax_page_ref = tax.getPageRef(src) - for path in tax_page_ref.possible_paths: - self.taxonomy_pages.append(path) - logger.debug(" - %s" % path) + # Remember what generator pages we should skip. + self.generator_pages = [] + logger.debug("Gathering generator page paths:") + for gen in self.app.generators: + for path in gen.page_ref.possible_paths: + self.generator_pages.append(path) + logger.debug(" - %s" % path) # Register some timers. self.app.env.registerTimer('LoadJob', raise_if_registered=False) @@ -101,8 +99,8 @@ if srclist is not None: self._bakeRealm(record, pool, realm, srclist) - # Bake taxonomies. - self._bakeTaxonomies(record, pool) + # Call all the page generators. + self._bakePageGenerators(record, pool) # All done with the workers. Close the pool and get reports. reports = pool.close() @@ -172,7 +170,7 @@ if reason is not None: # We have to bake everything from scratch. 
- self.app.cache.clearCaches(except_names=['app']) + self.app.cache.clearCaches(except_names=['app', 'baker']) self.force = True record.incremental_count = 0 record.clearPrevious() @@ -197,7 +195,7 @@ for source in srclist: factories = source.getPageFactories() all_factories += [f for f in factories - if f.path not in self.taxonomy_pages] + if f.path not in self.generator_pages] self._loadRealmPages(record, pool, all_factories) self._renderRealmPages(record, pool, all_factories) @@ -219,6 +217,7 @@ # than the last bake. record_entry = BakeRecordEntry(res['source_name'], res['path']) record_entry.config = res['config'] + record_entry.timestamp = res['timestamp'] if res['errors']: record_entry.errors += res['errors'] record.current.success = False @@ -272,8 +271,7 @@ logger.error(record_entry.errors[-1]) continue - route = self.app.getRoute(fac.source.name, fac.metadata, - skip_taxonomies=True) + route = self.app.getSourceRoute(fac.source.name, fac.metadata) if route is None: record_entry.errors.append( "Can't get route for page: %s" % fac.ref_spec) @@ -281,9 +279,14 @@ continue # All good, queue the job. 
+ route_index = self.app.routes.index(route) job = { 'type': JOB_RENDER_FIRST, - 'job': save_factory(fac)} + 'job': { + 'factory_info': save_factory(fac), + 'route_index': route_index + } + } jobs.append(job) ar = pool.queueJobs(jobs, handler=_handler) @@ -291,7 +294,7 @@ def _bakeRealmPages(self, record, pool, realm, factories): def _handler(res): - entry = record.getCurrentEntry(res['path'], res['taxonomy_info']) + entry = record.getCurrentEntry(res['path']) entry.subs = res['sub_entries'] if res['errors']: entry.errors += res['errors'] @@ -317,158 +320,14 @@ ar = pool.queueJobs(jobs, handler=_handler) ar.wait() - def _bakeTaxonomies(self, record, pool): - logger.debug("Baking taxonomy pages...") - with format_timed_scope(logger, 'built taxonomy buckets', - level=logging.DEBUG, colored=False): - buckets = self._buildTaxonomyBuckets(record) - - start_time = time.perf_counter() - page_count = self._bakeTaxonomyBuckets(record, pool, buckets) - logger.info(format_timed(start_time, - "baked %d taxonomy pages." % page_count)) - - def _buildTaxonomyBuckets(self, record): - # Let's see all the taxonomy terms for which we must bake a - # listing page... first, pre-populate our big map of used terms. - # For each source name, we have a list of taxonomies, and for each - # taxonomies, a list of terms, some being 'dirty', some used last - # time, etc. - buckets = {} - tax_names = [t.name for t in self.app.taxonomies] - source_names = [s.name for s in self.app.sources] - for sn in source_names: - source_taxonomies = {} - buckets[sn] = source_taxonomies - for tn in tax_names: - source_taxonomies[tn] = _TaxonomyTermsInfo() - - # Now see which ones are 'dirty' based on our bake record. - logger.debug("Gathering dirty taxonomy terms") - for prev_entry, cur_entry in record.transitions.values(): - # Re-bake all taxonomy pages that include new or changed - # pages. 
- if cur_entry and cur_entry.was_any_sub_baked: - entries = [cur_entry] - if prev_entry: - entries.append(prev_entry) - - for tax in self.app.taxonomies: - changed_terms = set() - for e in entries: - terms = e.config.get(tax.setting_name) - if terms: - if not tax.is_multiple: - terms = [terms] - changed_terms |= set(terms) - - if len(changed_terms) > 0: - tt_info = buckets[cur_entry.source_name][tax.name] - tt_info.dirty_terms |= changed_terms - - # Remember all terms used. - for tax in self.app.taxonomies: - if cur_entry and not cur_entry.was_overriden: - cur_terms = cur_entry.config.get(tax.setting_name) - if cur_terms: - if not tax.is_multiple: - cur_terms = [cur_terms] - tt_info = buckets[cur_entry.source_name][tax.name] - tt_info.all_terms |= set(cur_terms) - - # Re-bake the combination pages for terms that are 'dirty'. - known_combinations = set() - logger.debug("Gathering dirty term combinations") - for prev_entry, cur_entry in record.transitions.values(): - if not cur_entry: - continue - used_taxonomy_terms = cur_entry.getAllUsedTaxonomyTerms() - for sn, tn, terms in used_taxonomy_terms: - if isinstance(terms, tuple): - known_combinations.add((sn, tn, terms)) - for sn, tn, terms in known_combinations: - tt_info = buckets[sn][tn] - tt_info.all_terms.add(terms) - if not tt_info.dirty_terms.isdisjoint(set(terms)): - tt_info.dirty_terms.add(terms) + def _bakePageGenerators(self, record, pool): + for gen in self.app.generators: + ctx = PageGeneratorBakeContext(self.app, record, pool, gen) + gen.bake(ctx) - return buckets - - def _bakeTaxonomyBuckets(self, record, pool, buckets): - def _handler(res): - entry = record.getCurrentEntry(res['path'], res['taxonomy_info']) - entry.subs = res['sub_entries'] - if res['errors']: - entry.errors += res['errors'] - if entry.has_any_error: - record.current.success = False - - # Start baking those terms. 
- jobs = [] - for source_name, source_taxonomies in buckets.items(): - for tax_name, tt_info in source_taxonomies.items(): - terms = tt_info.dirty_terms - if len(terms) == 0: - continue - - logger.debug( - "Baking '%s' for source '%s': %s" % - (tax_name, source_name, terms)) - tax = self.app.getTaxonomy(tax_name) - source = self.app.getSource(source_name) - tax_page_ref = tax.getPageRef(source) - if not tax_page_ref.exists: - logger.debug( - "No taxonomy page found at '%s', skipping." % - tax.page_ref) - continue - - logger.debug( - "Using taxonomy page: %s:%s" % - (tax_page_ref.source_name, tax_page_ref.rel_path)) - fac = tax_page_ref.getFactory() - - for term in terms: - logger.debug( - "Queuing: %s [%s=%s]" % - (fac.ref_spec, tax_name, term)) - tax_info = TaxonomyInfo(tax_name, source_name, term) - - cur_entry = BakeRecordEntry( - fac.source.name, fac.path, tax_info) - record.addEntry(cur_entry) - - job = self._makeBakeJob(record, fac, tax_info) - if job is not None: - jobs.append(job) - - ar = pool.queueJobs(jobs, handler=_handler) - ar.wait() - - # Now we create bake entries for all the terms that were *not* dirty. - # This is because otherwise, on the next incremental bake, we wouldn't - # find any entry for those things, and figure that we need to delete - # their outputs. - for prev_entry, cur_entry in record.transitions.values(): - # Only consider taxonomy-related entries that don't have any - # current version. - if (prev_entry and prev_entry.taxonomy_info and - not cur_entry): - ti = prev_entry.taxonomy_info - tt_info = buckets[ti.source_name][ti.taxonomy_name] - if ti.term in tt_info.all_terms: - logger.debug("Creating unbaked entry for taxonomy " - "term '%s:%s'." % (ti.taxonomy_name, ti.term)) - record.collapseEntry(prev_entry) - else: - logger.debug("Taxonomy term '%s:%s' isn't used anymore." 
% - (ti.taxonomy_name, ti.term)) - - return len(jobs) - - def _makeBakeJob(self, record, fac, tax_info=None): + def _makeBakeJob(self, record, fac): # Get the previous (if any) and current entry for this page. - pair = record.getPreviousAndCurrentEntries(fac.path, tax_info) + pair = record.getPreviousAndCurrentEntries(fac.path) assert pair is not None prev_entry, cur_entry = pair assert cur_entry is not None @@ -482,16 +341,7 @@ # Build the route metadata and find the appropriate route. page = fac.buildPage() route_metadata = create_route_metadata(page) - if tax_info is not None: - tax = self.app.getTaxonomy(tax_info.taxonomy_name) - route = self.app.getTaxonomyRoute(tax_info.taxonomy_name, - tax_info.source_name) - - slugified_term = route.slugifyTaxonomyTerm(tax_info.term) - route_metadata[tax.term_name] = slugified_term - else: - route = self.app.getRoute(fac.source.name, route_metadata, - skip_taxonomies=True) + route = self.app.getSourceRoute(fac.source.name, route_metadata) assert route is not None # Figure out if this page is overriden by another previously @@ -511,11 +361,14 @@ cur_entry.flags |= BakeRecordEntry.FLAG_OVERRIDEN return None + route_index = self.app.routes.index(route) job = { 'type': JOB_BAKE, 'job': { 'factory_info': save_factory(fac), - 'taxonomy_info': tax_info, + 'generator_name': None, + 'generator_record_key': None, + 'route_index': route_index, 'route_metadata': route_metadata, 'dirty_source_names': record.dirty_source_names } @@ -569,15 +422,3 @@ initargs=(ctx,)) return pool - -class _TaxonomyTermsInfo(object): - def __init__(self): - self.dirty_terms = set() - self.all_terms = set() - - def __str__(self): - return 'dirty:%s, all:%s' % (self.dirty_terms, self.all_terms) - - def __repr__(self): - return 'dirty:%s, all:%s' % (self.dirty_terms, self.all_terms) -
--- a/piecrust/baking/records.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/baking/records.py Wed Jun 01 22:24:35 2016 -0700 @@ -8,20 +8,15 @@ logger = logging.getLogger(__name__) -def _get_transition_key(path, taxonomy_info=None): +def _get_transition_key(path, extra_key=None): key = path - if taxonomy_info: - key += '+%s:%s=' % (taxonomy_info.source_name, - taxonomy_info.taxonomy_name) - if isinstance(taxonomy_info.term, tuple): - key += '/'.join(taxonomy_info.term) - else: - key += taxonomy_info.term + if extra_key: + key += '+%s' % extra_key return hashlib.md5(key.encode('utf8')).hexdigest() class BakeRecord(Record): - RECORD_VERSION = 18 + RECORD_VERSION = 19 def __init__(self): super(BakeRecord, self).__init__() @@ -69,30 +64,21 @@ return copy.deepcopy(self.render_info) -class TaxonomyInfo(object): - def __init__(self, taxonomy_name, source_name, term): - self.taxonomy_name = taxonomy_name - self.source_name = source_name - self.term = term - - class BakeRecordEntry(object): """ An entry in the bake record. 
- - The `taxonomy_info` attribute should be a tuple of the form: - (taxonomy name, term, source name) """ FLAG_NONE = 0 FLAG_NEW = 2**0 FLAG_SOURCE_MODIFIED = 2**1 FLAG_OVERRIDEN = 2**2 - def __init__(self, source_name, path, taxonomy_info=None): + def __init__(self, source_name, path, extra_key=None): self.source_name = source_name self.path = path - self.taxonomy_info = taxonomy_info + self.extra_key = extra_key self.flags = self.FLAG_NONE self.config = None + self.timestamp = None self.errors = [] self.subs = [] @@ -145,14 +131,6 @@ res |= pinfo.used_source_names return res - def getAllUsedTaxonomyTerms(self): - res = set() - for o in self.subs: - for pinfo in o.render_info: - if pinfo: - res |= pinfo.used_taxonomy_terms - return res - class TransitionalBakeRecord(TransitionalRecord): def __init__(self, previous_path=None): @@ -168,10 +146,10 @@ super(TransitionalBakeRecord, self).addEntry(entry) def getTransitionKey(self, entry): - return _get_transition_key(entry.path, entry.taxonomy_info) + return _get_transition_key(entry.path, entry.extra_key) - def getPreviousAndCurrentEntries(self, path, taxonomy_info=None): - key = _get_transition_key(path, taxonomy_info) + def getPreviousAndCurrentEntries(self, path, extra_key=None): + key = _get_transition_key(path, extra_key) pair = self.transitions.get(key) return pair @@ -184,14 +162,14 @@ return cur return None - def getPreviousEntry(self, path, taxonomy_info=None): - pair = self.getPreviousAndCurrentEntries(path, taxonomy_info) + def getPreviousEntry(self, path, extra_key=None): + pair = self.getPreviousAndCurrentEntries(path, extra_key) if pair is not None: return pair[0] return None - def getCurrentEntry(self, path, taxonomy_info=None): - pair = self.getPreviousAndCurrentEntries(path, taxonomy_info) + def getCurrentEntry(self, path, extra_key=None): + pair = self.getPreviousAndCurrentEntries(path, extra_key) if pair is not None: return pair[1] return None
--- a/piecrust/baking/scheduler.py Sat May 14 18:18:54 2016 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,102 +0,0 @@ -import logging -import threading - - -logger = logging.getLogger(__name__) - - -class BakeScheduler(object): - _EMPTY = object() - _WAIT = object() - - def __init__(self, record, jobs=None): - self.record = record - self.jobs = list(jobs) if jobs is not None else [] - self._active_jobs = [] - self._lock = threading.Lock() - self._added_event = threading.Event() - self._done_event = threading.Event() - - def addJob(self, job): - logger.debug("Queuing job '%s:%s'." % ( - job.factory.source.name, job.factory.rel_path)) - with self._lock: - self.jobs.append(job) - self._added_event.set() - - def onJobFinished(self, job): - logger.debug("Removing job '%s:%s'." % ( - job.factory.source.name, job.factory.rel_path)) - with self._lock: - self._active_jobs.remove(job) - self._done_event.set() - - def getNextJob(self, wait_timeout=None, empty_timeout=None): - self._added_event.clear() - self._done_event.clear() - job = self._doGetNextJob() - while job in (self._EMPTY, self._WAIT): - if job == self._EMPTY: - if empty_timeout is None: - return None - logger.debug("Waiting for a new job to be added...") - res = self._added_event.wait(empty_timeout) - elif job == self._WAIT: - if wait_timeout is None: - return None - logger.debug("Waiting for a job to be finished...") - res = self._done_event.wait(wait_timeout) - if not res: - logger.debug("Timed-out. No job found.") - return None - job = self._doGetNextJob() - return job - - def _doGetNextJob(self): - with self._lock: - if len(self.jobs) == 0: - return self._EMPTY - - job = self.jobs.pop(0) - first_job = job - while True: - ready, wait_on_src = self._isJobReady(job) - if ready: - break - - logger.debug("Job '%s:%s' isn't ready yet: waiting on pages " - "from source '%s' to finish baking." 
% - (job.factory.source.name, - job.factory.rel_path, wait_on_src)) - self.jobs.append(job) - job = self.jobs.pop(0) - if job == first_job: - # None of the jobs are ready... we need to wait. - self.jobs.append(job) - return self._WAIT - - logger.debug( - "Job '%s:%s' is ready to go, moving to active queue." % - (job.factory.source.name, job.factory.rel_path)) - self._active_jobs.append(job) - return job - - def _isJobReady(self, job): - e = self.record.getPreviousEntry( - job.factory.source.name, - job.factory.rel_path, - taxonomy_info=job.record_entry.taxonomy_info) - if not e: - return (True, None) - used_source_names = e.getAllUsedSourceNames() - for sn in used_source_names: - if sn == job.factory.source.name: - continue - if any(filter(lambda j: j.factory.source.name == sn, - self.jobs)): - return (False, sn) - if any(filter(lambda j: j.factory.source.name == sn, - self._active_jobs)): - return (False, sn) - return (True, None) -
--- a/piecrust/baking/single.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/baking/single.py Wed Jun 01 22:24:35 2016 -0700 @@ -72,7 +72,7 @@ return os.path.normpath(os.path.join(*bake_path)) def bake(self, qualified_page, prev_entry, dirty_source_names, - tax_info=None): + generator_name=None): # Start baking the sub-pages. cur_sub = 1 has_more_subs = True @@ -140,8 +140,9 @@ logger.debug(" p%d -> %s" % (cur_sub, out_path)) rp = self._bakeSingle(qualified_page, cur_sub, out_path, - tax_info) + generator_name) except Exception as ex: + logger.exception(ex) page_rel_path = os.path.relpath(qualified_page.path, self.app.root_dir) raise BakingError("%s: error baking '%s'." % @@ -183,10 +184,11 @@ return sub_entries - def _bakeSingle(self, qualified_page, num, out_path, tax_info=None): - ctx = PageRenderingContext(qualified_page, page_num=num) - if tax_info: - ctx.setTaxonomyFilter(tax_info.term) + def _bakeSingle(self, qp, num, out_path, + generator_name=None): + ctx = PageRenderingContext(qp, page_num=num) + if qp.route.is_generator_route: + qp.route.generator.prepareRenderContext(ctx) with self.app.env.timerScope("PageRender"): rp = render_page(ctx)
--- a/piecrust/baking/worker.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/baking/worker.py Wed Jun 01 22:24:35 2016 -0700 @@ -53,7 +53,7 @@ self.ctx.previous_record_path) self.ctx.previous_record_index = {} for e in self.ctx.previous_record.entries: - key = _get_transition_key(e.path, e.taxonomy_info) + key = _get_transition_key(e.path, e.extra_key) self.ctx.previous_record_index[key] = e # Create the job handlers. @@ -134,11 +134,13 @@ 'source_name': fac.source.name, 'path': fac.path, 'config': None, + 'timestamp': None, 'errors': None} try: page = fac.buildPage() page._load() result['config'] = page.config.getAll() + result['timestamp'] = page.datetime.timestamp() except Exception as ex: logger.debug("Got loading error. Sending it to master.") result['errors'] = _get_errors(ex) @@ -150,13 +152,11 @@ class RenderFirstSubJobHandler(JobHandler): def handleJob(self, job): # Render the segments for the first sub-page of this page. - fac = load_factory(self.app, job) + fac = load_factory(self.app, job['factory_info']) self.app.env.addManifestEntry('RenderJobs', fac.ref_spec) - # These things should be OK as they're checked upstream by the baker. 
- route = self.app.getRoute(fac.source.name, fac.metadata, - skip_taxonomies=True) - assert route is not None + route_index = job['route_index'] + route = self.app.routes[route_index] page = fac.buildPage() route_metadata = create_route_metadata(page) @@ -198,35 +198,37 @@ fac = load_factory(self.app, job['factory_info']) self.app.env.addManifestEntry('BakeJobs', fac.ref_spec) + route_index = job['route_index'] route_metadata = job['route_metadata'] - tax_info = job['taxonomy_info'] - if tax_info is not None: - route = self.app.getTaxonomyRoute(tax_info.taxonomy_name, - tax_info.source_name) - else: - route = self.app.getRoute(fac.source.name, route_metadata, - skip_taxonomies=True) - assert route is not None + route = self.app.routes[route_index] + + gen_name = job['generator_name'] + gen_key = job['generator_record_key'] + dirty_source_names = job['dirty_source_names'] page = fac.buildPage() qp = QualifiedPage(page, route, route_metadata) result = { 'path': fac.path, - 'taxonomy_info': tax_info, + 'generator_name': gen_name, + 'generator_record_key': gen_key, 'sub_entries': None, 'errors': None} - dirty_source_names = job['dirty_source_names'] + + if job.get('needs_config', False): + result['config'] = page.config.getAll() previous_entry = None if self.ctx.previous_record_index is not None: - key = _get_transition_key(fac.path, tax_info) + key = _get_transition_key(fac.path, gen_key) previous_entry = self.ctx.previous_record_index.get(key) logger.debug("Baking page: %s" % fac.ref_spec) + logger.debug("With route metadata: %s" % route_metadata) try: sub_entries = self.page_baker.bake( - qp, previous_entry, dirty_source_names, tax_info) + qp, previous_entry, dirty_source_names, gen_name) result['sub_entries'] = sub_entries except BakingError as ex:
--- a/piecrust/cache.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/cache.py Wed Jun 01 22:24:35 2016 -0700 @@ -201,9 +201,7 @@ logger.debug("'%s' found in file-system cache." % key) item_raw = self.fs_cache.read(fs_key) - item = json.loads( - item_raw, - object_pairs_hook=collections.OrderedDict) + item = json.loads(item_raw) self.cache.put(key, item) self._hits += 1 return item
--- a/piecrust/commands/builtin/baking.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/commands/builtin/baking.py Wed Jun 01 22:24:35 2016 -0700 @@ -274,12 +274,8 @@ rel_path = os.path.relpath(entry.path, ctx.app.root_dir) logging.info(" path: %s" % rel_path) logging.info(" source: %s" % entry.source_name) - if entry.taxonomy_info: - ti = entry.taxonomy_info - logging.info(" taxonomy: %s = %s (in %s)" % - (ti.taxonomy_name, ti.term, ti.source_name)) - else: - logging.info(" taxonomy: <none>") + if entry.extra_key: + logging.info(" extra key: %s" % entry.extra_key) logging.info(" flags: %s" % _join(flags)) logging.info(" config: %s" % entry.config) @@ -326,11 +322,9 @@ logging.info(" used pagination: %s", pgn_info) logging.info(" used assets: %s", 'yes' if ri.used_assets else 'no') - logging.info(" used terms: %s" % - _join( - ['%s=%s (%s)' % (tn, t, sn) - for sn, tn, t in - ri.used_taxonomy_terms])) + logging.info(" other info:") + for k, v in ri._custom_info.items(): + logging.info(" - %s: %s" % (k, v)) if sub.errors: logging.error(" errors: %s" % sub.errors)
--- a/piecrust/commands/builtin/info.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/commands/builtin/info.py Wed Jun 01 22:24:35 2016 -0700 @@ -81,9 +81,12 @@ def run(self, ctx): for route in ctx.app.routes: logger.info("%s:" % route.uri_pattern) - logger.info(" source: %s" % route.source_name) - logger.info(" taxonomy: %s" % (route.taxonomy_name or '')) + logger.info(" source: %s" % (route.source_name or '')) + logger.info(" generator: %s" % (route.generator_name or '')) logger.info(" regex: %s" % route.uri_re.pattern) + logger.info(" function: %s(%s)" % ( + route.template_func_name, + ', '.join(route.template_func_args))) class ShowPathsCommand(ChefCommand):
--- a/piecrust/commands/builtin/util.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/commands/builtin/util.py Wed Jun 01 22:24:35 2016 -0700 @@ -4,6 +4,7 @@ import codecs import logging import yaml +from piecrust import CACHE_DIR from piecrust.app import CONFIG_PATH, THEME_CONFIG_PATH from piecrust.commands.base import ChefCommand @@ -63,7 +64,7 @@ pass def run(self, ctx): - cache_dir = ctx.app.cache_dir + cache_dir = os.path.join(ctx.app.root_dir, CACHE_DIR) if cache_dir and os.path.isdir(cache_dir): logger.info("Purging cache: %s" % cache_dir) shutil.rmtree(cache_dir)
--- a/piecrust/data/base.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/data/base.py Wed Jun 01 22:24:35 2016 -0700 @@ -19,10 +19,18 @@ values = [] for d in self._dicts: try: + val = getattr(d, name) + values.append(val) + continue + except AttributeError: + pass + + try: val = d[name] + values.append(val) + continue except KeyError: - continue - values.append(val) + pass if len(values) == 0: raise KeyError("No such item: %s" % self._subp(name))
--- a/piecrust/data/builder.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/data/builder.py Wed Jun 01 22:24:35 2016 -0700 @@ -7,7 +7,6 @@ from piecrust.data.paginator import Paginator from piecrust.data.piecrustdata import PieCrustData from piecrust.data.providersdata import DataProvidersData -from piecrust.uriutil import split_sub_uri logger = logging.getLogger(__name__) @@ -32,8 +31,8 @@ def build_page_data(ctx): app = ctx.app page = ctx.page - first_uri, _ = split_sub_uri(app, ctx.uri) pgn_source = ctx.pagination_source or get_default_pagination_source(page) + first_uri = ctx.page.getUri(1) pc_data = PieCrustData() config_data = PageData(page, ctx)
--- a/piecrust/data/iterators.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/data/iterators.py Wed Jun 01 22:24:35 2016 -0700 @@ -109,13 +109,27 @@ yield page +class GenericSortIterator(object): + def __init__(self, it, sorter): + self.it = it + self.sorter = sorter + self._sorted_it = None + + def __iter__(self): + if self._sorted_it is None: + self._sorted_it = self.sorter(self.it) + return iter(self._sorted_it) + + class PageIterator(object): debug_render = [] debug_render_doc_dynamic = ['_debugRenderDoc'] debug_render_not_empty = True - def __init__(self, source, current_page=None, pagination_filter=None, - offset=0, limit=-1, locked=False): + def __init__(self, source, *, + current_page=None, + pagination_filter=None, sorter=None, + offset=0, limit=-1, locked=False): self._source = source self._current_page = current_page self._locked = False @@ -151,6 +165,10 @@ self._simpleNonSortedWrap(PaginationFilterIterator, pagination_filter) + if sorter is not None: + self._simpleNonSortedWrap(GenericSortIterator, sorter) + self._has_sorter = True + if offset > 0 or limit > 0: self.slice(offset, limit)
--- a/piecrust/data/pagedata.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/data/pagedata.py Wed Jun 01 22:24:35 2016 -0700 @@ -1,7 +1,11 @@ import time +import logging import collections.abc +logger = logging.getLogger(__name__) + + class LazyPageConfigLoaderHasNoValue(Exception): """ An exception that can be returned when a loader for `LazyPageConfig` can't return any value. @@ -69,6 +73,7 @@ except LazyPageConfigLoaderHasNoValue: raise except Exception as ex: + logger.exception(ex) raise Exception( "Error while loading attribute '%s' for: %s" % (name, self._page.rel_path)) from ex @@ -88,13 +93,14 @@ except LazyPageConfigLoaderHasNoValue: raise except Exception as ex: + logger.exception(ex) raise Exception( "Error while loading attribute '%s' for: %s" % (name, self._page.rel_path)) from ex # We always keep the wildcard loader in the loaders list. return self._values[name] - raise LazyPageConfigLoaderHasNoValue() + raise LazyPageConfigLoaderHasNoValue("No such value: %s" % name) def _setValue(self, name, value): self._values[name] = value @@ -125,6 +131,7 @@ try: self._load() except Exception as ex: + logger.exception(ex) raise Exception( "Error while loading data for: %s" % self._page.rel_path) from ex @@ -150,10 +157,14 @@ def _load(self): page = self._page + dt = page.datetime for k, v in page.source_metadata.items(): self._setValue(k, v) self._setValue('url', self._ctx.uri) - self._setValue('timestamp', time.mktime(page.datetime.timetuple())) + self._setValue('timestamp', time.mktime(dt.timetuple())) + self._setValue('datetime', { + 'year': dt.year, 'month': dt.month, 'day': dt.day, + 'hour': dt.hour, 'minute': dt.minute, 'second': dt.second}) date_format = page.app.config.get('site/date_format') if date_format: self._setValue('date', page.datetime.strftime(date_format))
--- a/piecrust/data/paginationdata.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/data/paginationdata.py Wed Jun 01 22:24:35 2016 -0700 @@ -1,10 +1,14 @@ import time +import logging from piecrust.data.assetor import Assetor from piecrust.data.pagedata import LazyPageConfigData from piecrust.routing import create_route_metadata from piecrust.uriutil import split_uri +logger = logging.getLogger(__name__) + + class PaginationData(LazyPageConfigData): def __init__(self, page): super(PaginationData, self).__init__(page) @@ -17,7 +21,7 @@ # TODO: this is not quite correct, as we're missing parts of the # route metadata if the current page is a taxonomy page. route_metadata = create_route_metadata(page) - self._route = page.app.getRoute(page.source.name, route_metadata) + self._route = page.app.getSourceRoute(page.source.name, route_metadata) self._route_metadata = route_metadata if self._route is None: raise Exception("Can't get route for page: %s" % page.path) @@ -25,6 +29,7 @@ def _load(self): page = self._page + dt = page.datetime page_url = self._get_uri() _, slug = split_uri(page.app, page_url) self._setValue('url', page_url) @@ -32,6 +37,9 @@ self._setValue( 'timestamp', time.mktime(page.datetime.timetuple())) + self._setValue('datetime', { + 'year': dt.year, 'month': dt.month, 'day': dt.day, + 'hour': dt.hour, 'minute': dt.minute, 'second': dt.second}) date_format = page.app.config.get('site/date_format') if date_format: self._setValue('date', page.datetime.strftime(date_format)) @@ -66,9 +74,10 @@ ctx = PageRenderingContext(qp) render_result = render_page_segments(ctx) segs = render_result.segments - except Exception as e: + except Exception as ex: + logger.exception(ex) raise Exception( - "Error rendering segments for '%s'" % uri) from e + "Error rendering segments for '%s'" % uri) from ex else: segs = {} for name in self._page.config.get('segments'):
--- a/piecrust/data/provider.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/data/provider.py Wed Jun 01 22:24:35 2016 -0700 @@ -1,6 +1,7 @@ import time import collections.abc from piecrust.data.iterators import PageIterator +from piecrust.generation.taxonomy import Taxonomy from piecrust.sources.array import ArraySource @@ -72,27 +73,33 @@ self._taxonomies = {} self._ctx_set = False + @property + def posts(self): + return self._posts() + + @property + def years(self): + return self._buildYearlyArchive() + + @property + def months(self): + return self._buildMonthlyArchive() + def __getitem__(self, name): - if name == 'posts': - return self._posts() - elif name == 'years': - return self._buildYearlyArchive() - elif name == 'months': - return self._buildMonthlyArchive() - elif self._source.app.getTaxonomy(name) is not None: + if self._source.app.config.get('site/taxonomies/' + name) is not None: return self._buildTaxonomy(name) raise KeyError("No such item: %s" % name) def __iter__(self): keys = ['posts', 'years', 'months'] - keys += [t.name for t in self._source.app.taxonomies] + keys += list(self._source.app.config.get('site/taxonomies').keys()) return iter(keys) def __len__(self): - return 3 + len(self._source.app.taxonomies) + return 3 + len(self._source.app.config.get('site/taxonomies')) def _debugRenderTaxonomies(self): - return [t.name for t in self._source.app.taxonomies] + return list(self._source.app.config.get('site/taxonomies').keys()) def _posts(self): it = PageIterator(self._source, current_page=self._page) @@ -152,19 +159,19 @@ if tax_name in self._taxonomies: return self._taxonomies[tax_name] - tax_info = self._page.app.getTaxonomy(tax_name) - setting_name = tax_info.setting_name + tax_cfg = self._page.app.config.get('site/taxonomies/' + tax_name) + tax = Taxonomy(tax_name, tax_cfg) posts_by_tax_value = {} for post in self._source.getPages(): - tax_values = post.config.get(setting_name) + tax_values = post.config.get(tax.setting_name) if tax_values 
is None: continue if not isinstance(tax_values, list): tax_values = [tax_values] for val in tax_values: - posts_by_tax_value.setdefault(val, []) - posts_by_tax_value[val].append(post) + posts = posts_by_tax_value.setdefault(val, []) + posts.append(post) entries = [] for value, ds in posts_by_tax_value.items(): @@ -184,6 +191,9 @@ class BlogArchiveEntry(object): + debug_render = ['name', 'timestamp', 'posts'] + debug_render_invoke = ['name', 'timestamp', 'posts'] + def __init__(self, page, name, timestamp): self.name = name self.timestamp = timestamp @@ -208,6 +218,9 @@ class BlogTaxonomyEntry(object): + debug_render = ['name', 'post_count', 'posts'] + debug_render_invoke = ['name', 'post_count', 'posts'] + def __init__(self, page, source, property_value): self._page = page self._source = source @@ -235,5 +248,5 @@ if self._iterator is not None: return - self._iterator = PageIterator(self._source, self._page) + self._iterator = PageIterator(self._source, current_page=self._page)
--- a/piecrust/fastpickle.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/fastpickle.py Wed Jun 01 22:24:35 2016 -0700 @@ -11,6 +11,12 @@ return data.encode('utf8') +def pickle_obj(obj): + if obj is not None: + return _pickle_object(obj) + return None + + def pickle_intob(obj, buf): data = _pickle_object(obj) buf = _WriteWrapper(buf) @@ -22,6 +28,12 @@ return _unpickle_object(data) +def unpickle_obj(data): + if data is not None: + return _unpickle_object(data) + return None + + def unpickle_fromb(buf, bufsize): with buf.getbuffer() as innerbuf: data = codecs.decode(innerbuf[:bufsize], 'utf8') @@ -70,7 +82,8 @@ elif op == _UNPICKLING: res = collections.OrderedDict() for k, v in obj.items(): - res[k] = func(v) + if k != '__type__': + res[k] = func(v) return res @@ -230,13 +243,11 @@ class_def = getattr(mod, class_name) obj = class_def.__new__(class_def) - del state['__class__'] - del state['__module__'] attr_names = list(state.keys()) for name in attr_names: - state[name] = _unpickle_object(state[name]) - - obj.__dict__.update(state) + if name == '__class__' or name == '__module__': + continue + obj.__dict__[name] = _unpickle_object(state[name]) return obj
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/generation/base.py Wed Jun 01 22:24:35 2016 -0700 @@ -0,0 +1,141 @@ +from werkzeug.utils import cached_property +from piecrust.baking.records import BakeRecordEntry +from piecrust.baking.worker import save_factory, JOB_BAKE +from piecrust.configuration import ConfigurationError +from piecrust.routing import create_route_metadata +from piecrust.sources.pageref import PageRef + + +class InvalidRecordExtraKey(Exception): + pass + + +class PageGeneratorBakeContext(object): + def __init__(self, app, record, pool, generator): + self._app = app + self._record = record + self._pool = pool + self._generator = generator + self._job_queue = [] + self._is_running = False + + def getRecordExtraKey(self, seed): + return '%s:%s' % (self._generator.name, seed) + + def matchesRecordExtraKey(self, extra_key): + return (extra_key is not None and + extra_key.startswith(self._generator.name + ':')) + + def getSeedFromRecordExtraKey(self, extra_key): + if not self.matchesRecordExtraKey(extra_key): + raise InvalidRecordExtraKey("Invalid extra key: %s" % extra_key) + return extra_key[len(self._generator.name) + 1:] + + def getAllPageRecords(self): + return self._record.transitions.values() + + def getBakedPageRecords(self): + for prev, cur in self.getAllPageRecords(): + if cur and cur.was_any_sub_baked: + yield (prev, cur) + + def collapseRecord(self, entry): + self._record.collapseEntry(entry) + + def queueBakeJob(self, page_fac, route, extra_route_metadata, seed): + if self._is_running: + raise Exception("The job queue is running.") + + extra_key = self.getRecordExtraKey(seed) + entry = BakeRecordEntry( + page_fac.source.name, + page_fac.path, + extra_key) + self._record.addEntry(entry) + + page = page_fac.buildPage() + route_metadata = create_route_metadata(page) + route_metadata.update(extra_route_metadata) + uri = route.getUri(route_metadata) + override_entry = self._record.getOverrideEntry(page.path, uri) + if 
override_entry is not None: + override_source = self.app.getSource( + override_entry.source_name) + if override_source.realm == fac.source.realm: + cur_entry.errors.append( + "Page '%s' maps to URL '%s' but is overriden " + "by page '%s'." % + (fac.ref_spec, uri, override_entry.path)) + logger.error(cur_entry.errors[-1]) + cur_entry.flags |= BakeRecordEntry.FLAG_OVERRIDEN + return + + route_index = self._app.routes.index(route) + job = { + 'type': JOB_BAKE, + 'job': { + 'factory_info': save_factory(page_fac), + 'generator_name': self._generator.name, + 'generator_record_key': extra_key, + 'route_index': route_index, + 'route_metadata': route_metadata, + 'dirty_source_names': self._record.dirty_source_names, + 'needs_config': True + } + } + self._job_queue.append(job) + + def runJobQueue(self): + def _handler(res): + entry = self._record.getCurrentEntry( + res['path'], res['generator_record_key']) + entry.config = res['config'] + entry.subs = res['sub_entries'] + if res['errors']: + entry.errors += res['errors'] + if entry.has_any_error: + self._record.current.success = False + + self._is_running = True + try: + ar = self._pool.queueJobs(self._job_queue, handler=_handler) + ar.wait() + finally: + self._is_running = False + + +class PageGenerator(object): + def __init__(self, app, name, config): + self.app = app + self.name = name + self.config = config or {} + + self.source_name = config.get('source') + if self.source_name is None: + raise ConfigurationError( + "Generator '%s' requires a source name" % name) + + page_ref = config.get('page') + if page_ref is None: + raise ConfigurationError( + "Generator '%s' requires a listing page ref." % name) + self.page_ref = PageRef(app, page_ref) + + @cached_property + def source(self): + for src in self.app.sources: + if src.name == self.source_name: + return src + raise Exception("Can't find source '%s' for generator '%s'." 
% ( + self.source_name, self.name)) + + def getPageFactory(self, route_metadata): + # This will raise `PageNotFoundError` naturally if not found. + return self.page_ref.getFactory() + + def bake(self, ctx): + raise NotImplementedError() + + def onRouteFunctionUsed(self, route, route_metadata): + pass +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/generation/blogarchives.py Wed Jun 01 22:24:35 2016 -0700 @@ -0,0 +1,116 @@ +import logging +import datetime +from piecrust.chefutil import format_timed_scope +from piecrust.data.filters import PaginationFilter, IFilterClause +from piecrust.data.iterators import PageIterator +from piecrust.generation.base import PageGenerator + + +logger = logging.getLogger(__name__) + + +class BlogArchivesPageGenerator(PageGenerator): + GENERATOR_NAME = 'blog_archives' + + def __init__(self, app, name, config): + super(BlogArchivesPageGenerator, self).__init__(app, name, config) + + def onRouteFunctionUsed(self, route, route_metadata): + pass + + def prepareRenderContext(self, ctx): + ctx.pagination_source = self.source + + year = ctx.page.route_metadata.get('year') + if year is None: + raise Exception( + "Can't find the archive year in the route metadata") + if type(year) is not int: + raise Exception( + "The route for generator '%s' should specify an integer " + "parameter for 'year'." % self.name) + + flt = PaginationFilter() + flt.addClause(IsFromYearFilterClause(year)) + ctx.pagination_filter = flt + + ctx.custom_data['year'] = year + + flt2 = PaginationFilter() + flt2.addClause(IsFromYearFilterClause(year)) + it = PageIterator(self.source, pagination_filter=flt2, + sorter=_date_sorter) + ctx.custom_data['archives'] = it + + def bake(self, ctx): + if not self.page_ref.exists: + logger.debug( + "No page found at '%s', skipping %s archives." % + (self.page_ref, self.source_name)) + return + + logger.debug("Baking %s archives...", self.source_name) + with format_timed_scope(logger, 'gathered archive years', + level=logging.DEBUG, colored=False): + all_years, dirty_years = self._buildDirtyYears(ctx) + + with format_timed_scope(logger, "baked %d %s archives." 
% + (len(dirty_years), self.source_name)): + self._bakeDirtyYears(ctx, all_years, dirty_years) + + def _buildDirtyYears(self, ctx): + logger.debug("Gathering dirty post years.") + all_years = set() + dirty_years = set() + for _, cur_entry in ctx.getAllPageRecords(): + if cur_entry.source_name == self.source_name: + dt = datetime.datetime.fromtimestamp(cur_entry.timestamp) + all_years.add(dt.year) + if cur_entry.was_any_sub_baked: + dirty_years.add(dt.year) + return all_years, dirty_years + + def _bakeDirtyYears(self, ctx, all_years, dirty_years): + route = self.app.getGeneratorRoute(self.name) + if route is None: + raise Exception( + "No routes have been defined for generator: %s" % + self.name) + + logger.debug("Using archive page: %s" % self.page_ref) + fac = self.page_ref.getFactory() + + for y in dirty_years: + extra_route_metadata = {'year': y} + + logger.debug("Queuing: %s [%s]" % (fac.ref_spec, y)) + ctx.queueBakeJob(fac, route, extra_route_metadata, str(y)) + ctx.runJobQueue() + + # Create bake entries for the years that were *not* dirty. + # Otherwise, when checking for deleted pages, we would not find any + # outputs and would delete those files. + for prev_entry, cur_entry in ctx.getAllPageRecords(): + if prev_entry and not cur_entry: + try: + y = ctx.getSeedFromRecordExtraKey(prev_entry.extra_key) + except InvalidRecordExtraKey: + continue + if y in all_years: + logger.debug("Creating unbaked entry for %d archive." % y) + ctx.collapseRecord(prev_entry) + else: + logger.debug("No page references year %d anymore." % y) + + +class IsFromYearFilterClause(IFilterClause): + def __init__(self, year): + self.year = year + + def pageMatches(self, fil, page): + return (page.datetime.year == self.year) + + +def _date_sorter(it): + return sorted(it, key=lambda x: x.datetime) +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/generation/taxonomy.py Wed Jun 01 22:24:35 2016 -0700 @@ -0,0 +1,342 @@ +import re +import time +import logging +import unidecode +from piecrust.chefutil import format_timed, format_timed_scope +from piecrust.configuration import ConfigurationError +from piecrust.data.filters import ( + PaginationFilter, SettingFilterClause, + page_value_accessor) +from piecrust.generation.base import PageGenerator, InvalidRecordExtraKey +from piecrust.sources.pageref import PageRef, PageNotFoundError + + +logger = logging.getLogger(__name__) + + +SLUGIFY_ENCODE = 1 +SLUGIFY_TRANSLITERATE = 2 +SLUGIFY_LOWERCASE = 4 +SLUGIFY_DOT_TO_DASH = 8 +SLUGIFY_SPACE_TO_DASH = 16 + + +re_first_dot_to_dash = re.compile(r'^\.+') +re_dot_to_dash = re.compile(r'\.+') +re_space_to_dash = re.compile(r'\s+') + + +class Taxonomy(object): + def __init__(self, name, config): + self.name = name + self.config = config + self.term_name = config.get('term', name) + self.is_multiple = bool(config.get('multiple', False)) + self.separator = config.get('separator', '/') + self.page_ref = config.get('page') + self._source_page_refs = {} + + @property + def setting_name(self): + if self.is_multiple: + return self.name + return self.term_name + + +class TaxonomyPageGenerator(PageGenerator): + GENERATOR_NAME = 'taxonomy' + + def __init__(self, app, name, config): + super(TaxonomyPageGenerator, self).__init__(app, name, config) + + tax_name = config.get('taxonomy') + if tax_name is None: + raise ConfigurationError( + "Generator '%s' requires a taxonomy name." 
% name) + tax_config = app.config.get('site/taxonomies/' + tax_name) + if tax_config is None: + raise ConfigurationError( + "Error initializing generator '%s', no such taxonomy: %s", + (name, tax_name)) + self.taxonomy = Taxonomy(tax_name, tax_config) + + sm = config.get('slugify_mode') + if not sm: + sm = app.config.get('site/slugify_mode', 'encode') + self.slugify_mode = _parse_slugify_mode(sm) + + def prepareRenderContext(self, ctx): + self._setPaginationSource(ctx) + + tax_terms, is_combination = self._getTaxonomyTerms( + ctx.page.route_metadata) + self._setTaxonomyFilter(ctx, tax_terms, is_combination) + + ctx.custom_data.update({ + self.taxonomy.term_name: tax_terms, + 'is_multiple_%s' % self.taxonomy.term_name: is_combination}) + if (self.taxonomy.is_multiple and + self.taxonomy.name != self.taxonomy.term_name): + mult_val = tax_terms + if not is_combination: + mult_val = (mult_val,) + ctx.custom_data[self.taxonomy.name] = mult_val + logger.debug("Prepared render context with: %s" % ctx.custom_data) + + def _getTaxonomyTerms(self, route_metadata): + all_values = route_metadata.get(self.taxonomy.term_name) + if all_values is None: + raise Exception("'%s' values couldn't be found in route metadata" % + self.taxonomy.term_name) + + if self.taxonomy.is_multiple: + sep = self.taxonomy.separator + if sep in all_values: + return tuple(all_values.split(sep)), True + return all_values, False + + def _setTaxonomyFilter(self, ctx, term_value, is_combination): + flt = PaginationFilter(value_accessor=page_value_accessor) + flt.addClause(HasTaxonomyTermsFilterClause( + self.taxonomy, self.slugify_mode, term_value, is_combination)) + ctx.pagination_filter = flt + + def _setPaginationSource(self, ctx): + ctx.pagination_source = self.source + + def onRouteFunctionUsed(self, route, route_metadata): + # Get the values. + values = route_metadata[self.taxonomy.term_name] + if self.taxonomy.is_multiple: + #TODO: here we assume the route has been properly configured. 
+ values = tuple([str(v) for v in values]) + else: + values = (str(values),) + + # We need to register this use of a taxonomy term. + eis = self.app.env.exec_info_stack + cpi = eis.current_page_info.render_ctx.current_pass_info + if cpi: + utt = cpi.getCustomInfo('used_taxonomy_terms', [], True) + utt.append(values) + + # We need to slugify the terms before they get transformed + # into URL-bits. + s = _Slugifier(self.taxonomy, self.slugify_mode) + str_values = s.slugify(values) + route_metadata[self.taxonomy.term_name] = str_values + logger.debug("Changed route metadata to: %s" % route_metadata) + + def bake(self, ctx): + if not self.page_ref.exists: + logger.debug( + "No page found at '%s', skipping taxonomy '%s'." % + (self.page_ref, self.taxonomy.name)) + return + + logger.debug("Baking %s pages...", self.taxonomy.name) + with format_timed_scope(logger, 'gathered taxonomy terms', + level=logging.DEBUG, colored=False): + all_terms, dirty_terms = self._buildDirtyTaxonomyTerms(ctx) + + start_time = time.perf_counter() + page_count = self._bakeTaxonomyTerms(ctx, all_terms, dirty_terms) + logger.info(format_timed( + start_time, + "baked %d %s pages." % (page_count, self.taxonomy.term_name))) + + def _buildDirtyTaxonomyTerms(self, ctx): + # Build the list of terms for our taxonomy, and figure out which ones + # are 'dirty' for the current bake. + logger.debug("Gathering dirty taxonomy terms") + all_terms = set() + single_dirty_terms = set() + + # Re-bake all taxonomy terms that include new or changed pages. + for prev_entry, cur_entry in ctx.getBakedPageRecords(): + entries = [cur_entry] + if prev_entry: + entries.append(prev_entry) + + terms = [] + for e in entries: + entry_terms = e.config.get(self.taxonomy.setting_name) + if entry_terms: + if not self.taxonomy.is_multiple: + terms.append(entry_terms) + else: + terms += entry_terms + single_dirty_terms.update(terms) + + # Remember all terms used. 
+ for _, cur_entry in ctx.getAllPageRecords(): + if cur_entry and not cur_entry.was_overriden: + cur_terms = cur_entry.config.get(self.taxonomy.setting_name) + if cur_terms: + if not self.taxonomy.is_multiple: + all_terms.add(cur_terms) + else: + all_terms |= set(cur_terms) + + # Re-bake the combination pages for terms that are 'dirty'. + # We make all terms into tuple, even those that are not actual + # combinations, so that we have less things to test further down the + # line. + dirty_terms = [(t,) for t in single_dirty_terms] + # Add the combinations to that list. + if self.taxonomy.is_multiple: + known_combinations = set() + logger.debug("Gathering dirty term combinations") + for _, cur_entry in ctx.getAllPageRecords(): + if cur_entry: + used_terms = _get_all_entry_taxonomy_terms(cur_entry) + for terms in used_terms: + if len(terms) > 1: + known_combinations.add(terms) + + for terms in known_combinations: + if not single_dirty_terms.isdisjoint(set(terms)): + dirty_terms.append(terms) + + return all_terms, dirty_terms + + def _bakeTaxonomyTerms(self, ctx, all_terms, dirty_terms): + # Start baking those terms. + logger.debug( + "Baking '%s' for source '%s': %s" % + (self.taxonomy.name, self.source_name, dirty_terms)) + + route = self.app.getGeneratorRoute(self.name) + if route is None: + raise Exception("No routes have been defined for generator: %s" % + self.name) + + logger.debug("Using taxonomy page: %s" % self.page_ref) + fac = self.page_ref.getFactory() + + job_count = 0 + s = _Slugifier(self.taxonomy, self.slugify_mode) + for term in dirty_terms: + if not self.taxonomy.is_multiple: + term = term[0] + slugified_term = s.slugify(term) + extra_route_metadata = {self.taxonomy.term_name: slugified_term} + + # Use the slugified term as the record extra key. 
+ logger.debug( + "Queuing: %s [%s=%s]" % + (fac.ref_spec, self.taxonomy.name, slugified_term)) + ctx.queueBakeJob(fac, route, extra_route_metadata, slugified_term) + job_count += 1 + ctx.runJobQueue() + + # Now we create bake entries for all the terms that were *not* dirty. + # This is because otherwise, on the next incremental bake, we wouldn't + # find any entry for those things, and figure that we need to delete + # their outputs. + for prev_entry, cur_entry in ctx.getAllPageRecords(): + # Only consider taxonomy-related entries that don't have any + # current version (i.e. they weren't baked just now). + if prev_entry and not cur_entry: + try: + t = ctx.getSeedFromRecordExtraKey(prev_entry.extra_key) + except InvalidRecordExtraKey: + continue + + if t in all_terms: + logger.debug("Creating unbaked entry for %s term: %s" % + (self.name, t)) + ctx.collapseRecord(prev_entry) + else: + logger.debug("Term %s in %s isn't used anymore." % + (self.name, t)) + + return job_count + + +def _get_all_entry_taxonomy_terms(entry): + res = set() + for o in entry.subs: + for pinfo in o.render_info: + if pinfo: + terms = pinfo.getCustomInfo('used_taxonomy_terms') + if terms: + res |= set(terms) + return res + + +class HasTaxonomyTermsFilterClause(SettingFilterClause): + def __init__(self, taxonomy, slugify_mode, value, is_combination): + super(HasTaxonomyTermsFilterClause, self).__init__( + taxonomy.setting_name, value) + self._taxonomy = taxonomy + self._is_combination = is_combination + self._slugifier = _Slugifier(taxonomy, slugify_mode) + + def pageMatches(self, fil, page): + if self._taxonomy.is_multiple: + # Multiple taxonomy, i.e. it supports multiple terms, like tags. + page_values = fil.value_accessor(page, self.name) + if page_values is None or not isinstance(page_values, list): + return False + + page_set = set(map(self._slugifier.slugify, page_values)) + if self._is_combination: + # Multiple taxonomy, and multiple terms to match. 
Check that + # the ones to match are all in the page's terms. + value_set = set(self.value) + return value_set.issubset(page_set) + else: + # Multiple taxonomy, one term to match. + return self.value in page_set + else: + # Single taxonomy. Just compare the values. + page_value = fil.value_accessor(page, self.name) + if page_value is None: + return False + page_value = self._slugifier.slugify(page_value) + return page_value == self.value + + +class _Slugifier(object): + def __init__(self, taxonomy, mode): + self.taxonomy = taxonomy + self.mode = mode + + def slugify(self, term): + if isinstance(term, tuple): + return self.taxonomy.separator.join( + map(self._slugifyOne, term)) + return self._slugifyOne(term) + + def _slugifyOne(self, term): + if self.mode & SLUGIFY_TRANSLITERATE: + term = unidecode.unidecode(term) + if self.mode & SLUGIFY_LOWERCASE: + term = term.lower() + if self.mode & SLUGIFY_DOT_TO_DASH: + term = re_first_dot_to_dash.sub('', term) + term = re_dot_to_dash.sub('-', term) + if self.mode & SLUGIFY_SPACE_TO_DASH: + term = re_space_to_dash.sub('-', term) + return term + + +def _parse_slugify_mode(value): + mapping = { + 'encode': SLUGIFY_ENCODE, + 'transliterate': SLUGIFY_TRANSLITERATE, + 'lowercase': SLUGIFY_LOWERCASE, + 'dot_to_dash': SLUGIFY_DOT_TO_DASH, + 'space_to_dash': SLUGIFY_SPACE_TO_DASH} + mode = 0 + for v in value.split(','): + f = mapping.get(v.strip()) + if f is None: + if v == 'iconv': + raise Exception("'iconv' is not supported as a slugify mode " + "in PieCrust2. Use 'transliterate'.") + raise Exception("Unknown slugify flag: %s" % v) + mode |= f + return mode +
--- a/piecrust/page.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/page.py Wed Jun 01 22:24:35 2016 -0700 @@ -113,6 +113,7 @@ # No idea what the date/time for this page is. self._datetime = datetime.datetime.fromtimestamp(0) except Exception as ex: + logger.exception(ex) raise Exception( "Error computing time for page: %s" % self.path) from ex @@ -155,6 +156,7 @@ try: parsed_d = dateutil.parser.parse(page_date) except Exception as ex: + logger.exception(ex) raise ConfigurationError("Invalid date: %s" % page_date) from ex return datetime.date( year=parsed_d.year, @@ -175,6 +177,7 @@ try: parsed_t = dateutil.parser.parse(page_time) except Exception as ex: + logger.exception(ex) raise ConfigurationError("Invalid time: %s" % page_time) from ex return datetime.timedelta( hours=parsed_t.hour, @@ -307,6 +310,10 @@ txtlen = len(txt) index = txt.find('-', offset) while index >= 0 and index < txtlen - 8: + # Look for a potential `<--format-->` + if index > 0 and txt[index - 1] == '<' and txt[index + 1] == '-': + return True + # Look for a potential `---segment---` if txt[index + 1] == '-' and txt[index + 2] == '-': return True index = txt.find('-', index + 1)
--- a/piecrust/plugins/base.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/plugins/base.py Wed Jun 01 22:24:35 2016 -0700 @@ -33,6 +33,9 @@ def getSources(self): return [] + def getPageGenerators(self): + return [] + def getPublishers(self): return [] @@ -86,6 +89,9 @@ def getSources(self): return self._getPluginComponents('getSources') + def getPageGenerators(self): + return self._getPluginComponents('getPageGenerators') + def getPublishers(self): return self._getPluginComponents('getPublishers')
--- a/piecrust/plugins/builtin.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/plugins/builtin.py Wed Jun 01 22:24:35 2016 -0700 @@ -21,6 +21,8 @@ from piecrust.formatting.markdownformatter import MarkdownFormatter from piecrust.formatting.textileformatter import TextileFormatter from piecrust.formatting.smartypantsformatter import SmartyPantsFormatter +from piecrust.generation.blogarchives import BlogArchivesPageGenerator +from piecrust.generation.taxonomy import TaxonomyPageGenerator from piecrust.importing.jekyll import JekyllImporter from piecrust.importing.piecrust import PieCrust1Importer from piecrust.importing.wordpress import WordpressXmlImporter @@ -87,6 +89,11 @@ OrderedPageSource, ProseSource] + def getPageGenerators(self): + return [ + TaxonomyPageGenerator, + BlogArchivesPageGenerator] + def getDataProviders(self): return [ IteratorDataProvider,
--- a/piecrust/rendering.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/rendering.py Wed Jun 01 22:24:35 2016 -0700 @@ -7,6 +7,7 @@ DataBuildingContext, build_page_data, build_layout_data) from piecrust.data.filters import ( PaginationFilter, SettingFilterClause, page_value_accessor) +from piecrust.fastpickle import _pickle_object, _unpickle_object from piecrust.sources.base import PageSource from piecrust.templating.base import TemplateNotFoundError, TemplatingError @@ -79,56 +80,33 @@ class RenderPassInfo(object): def __init__(self): self.used_source_names = set() - self.used_taxonomy_terms = set() self.used_pagination = False self.pagination_has_more = False self.used_assets = False + self._custom_info = {} - def merge(self, other): - self.used_source_names |= other.used_source_names - self.used_taxonomy_terms |= other.used_taxonomy_terms - self.used_pagination = self.used_pagination or other.used_pagination - self.pagination_has_more = (self.pagination_has_more or - other.pagination_has_more) - self.used_assets = self.used_assets or other.used_assets + def setCustomInfo(self, key, info): + self._custom_info[key] = info - def _toJson(self): - data = { - 'used_source_names': list(self.used_source_names), - 'used_taxonomy_terms': list(self.used_taxonomy_terms), - 'used_pagination': self.used_pagination, - 'pagination_has_more': self.pagination_has_more, - 'used_assets': self.used_assets} - return data - - @staticmethod - def _fromJson(data): - assert data is not None - rpi = RenderPassInfo() - rpi.used_source_names = set(data['used_source_names']) - for i in data['used_taxonomy_terms']: - terms = i[2] - if isinstance(terms, list): - terms = tuple(terms) - rpi.used_taxonomy_terms.add((i[0], i[1], terms)) - rpi.used_pagination = data['used_pagination'] - rpi.pagination_has_more = data['pagination_has_more'] - rpi.used_assets = data['used_assets'] - return rpi + def getCustomInfo(self, key, default=None, create_if_missing=False): + if create_if_missing: + return 
self._custom_info.setdefault(key, default) + return self._custom_info.get(key, default) class PageRenderingContext(object): - def __init__(self, qualified_page, page_num=1, force_render=False): + def __init__(self, qualified_page, page_num=1, + force_render=False, is_from_request=False): self.page = qualified_page self.page_num = page_num self.force_render = force_render + self.is_from_request = is_from_request self.pagination_source = None self.pagination_filter = None - self.custom_data = None + self.custom_data = {} + self.render_passes = [None, None] # Same length as RENDER_PASSES self._current_pass = PASS_NONE - self.render_passes = [None, None] # Same length as RENDER_PASSES - @property def app(self): return self.page.app @@ -168,69 +146,11 @@ pass_info = self.current_pass_info pass_info.used_source_names.add(source.name) - def setTaxonomyFilter(self, term_value, *, needs_slugifier=False): - if not self.page.route.is_taxonomy_route: - raise Exception("The page for this context is not tied to a " - "taxonomy route: %s" % self.uri) - - slugifier = None - if needs_slugifier: - slugifier = self.page.route.slugifyTaxonomyTerm - taxonomy = self.app.getTaxonomy(self.page.route.taxonomy_name) - - flt = PaginationFilter(value_accessor=page_value_accessor) - flt.addClause(HasTaxonomyTermsFilterClause( - taxonomy, term_value, slugifier)) - self.pagination_filter = flt - - is_combination = isinstance(term_value, tuple) - self.custom_data = { - taxonomy.term_name: term_value, - 'is_multiple_%s' % taxonomy.term_name: is_combination} - def _raiseIfNoCurrentPass(self): if self._current_pass == PASS_NONE: raise Exception("No rendering pass is currently active.") -class HasTaxonomyTermsFilterClause(SettingFilterClause): - def __init__(self, taxonomy, value, slugifier): - super(HasTaxonomyTermsFilterClause, self).__init__( - taxonomy.setting_name, value) - self._taxonomy = taxonomy - self._slugifier = slugifier - self._is_combination = isinstance(self.value, tuple) - - def 
pageMatches(self, fil, page): - if self._taxonomy.is_multiple: - # Multiple taxonomy, i.e. it supports multiple terms, like tags. - page_values = fil.value_accessor(page, self.name) - if page_values is None or not isinstance(page_values, list): - return False - - if self._slugifier is not None: - page_set = set(map(self._slugifier, page_values)) - else: - page_set = set(page_values) - - if self._is_combination: - # Multiple taxonomy, and multiple terms to match. Check that - # the ones to match are all in the page's terms. - value_set = set(self.value) - return value_set.issubset(page_set) - else: - # Multiple taxonomy, one term to match. - return self.value in page_set - - # Single taxonomy. Just compare the values. - page_value = fil.value_accessor(page, self.name) - if page_value is None: - return False - if self._slugifier is not None: - page_value = self._slugifier(page_value) - return page_value == self.value - - def render_page(ctx): eis = ctx.app.env.exec_info_stack eis.pushPage(ctx.page, ctx) @@ -278,13 +198,16 @@ rp = RenderedPage(page, ctx.uri, ctx.page_num) rp.data = page_data rp.content = layout_result['content'] - rp.render_info[PASS_FORMATTING] = RenderPassInfo._fromJson( - render_result['pass_info']) + rp.render_info[PASS_FORMATTING] = _unpickle_object( + render_result['pass_info']) if layout_result['pass_info'] is not None: - rp.render_info[PASS_RENDERING] = RenderPassInfo._fromJson( - layout_result['pass_info']) + rp.render_info[PASS_RENDERING] = _unpickle_object( + layout_result['pass_info']) return rp except Exception as ex: + if ctx.app.debug: + raise + logger.exception(ex) page_rel_path = os.path.relpath(ctx.page.path, ctx.app.root_dir) raise Exception("Error rendering page: %s" % page_rel_path) from ex finally: @@ -317,7 +240,7 @@ rs = RenderedSegments( render_result['segments'], - RenderPassInfo._fromJson(render_result['pass_info'])) + _unpickle_object(render_result['pass_info'])) return rs @@ -376,7 +299,7 @@ pass_info = 
cpi.render_ctx.render_passes[PASS_FORMATTING] res = { 'segments': formatted_segments, - 'pass_info': pass_info._toJson()} + 'pass_info': _pickle_object(pass_info)} return res @@ -402,12 +325,13 @@ try: output = engine.renderFile(full_names, layout_data) except TemplateNotFoundError as ex: + logger.exception(ex) msg = "Can't find template for page: %s\n" % page.path msg += "Looked for: %s" % ', '.join(full_names) raise Exception(msg) from ex pass_info = cpi.render_ctx.render_passes[PASS_RENDERING] - res = {'content': output, 'pass_info': pass_info._toJson()} + res = {'content': output, 'pass_info': _pickle_object(pass_info)} return res
--- a/piecrust/routing.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/routing.py Wed Jun 01 22:24:35 2016 -0700 @@ -3,17 +3,16 @@ import copy import logging import urllib.parse -import unidecode +from werkzeug.utils import cached_property logger = logging.getLogger(__name__) -route_re = re.compile(r'%((?P<qual>path):)?(?P<name>\w+)%') -route_esc_re = re.compile(r'\\%((?P<qual>path)\\:)?(?P<name>\w+)\\%') -template_func_re = re.compile(r'^(?P<name>\w+)\((?P<first_arg>\w+)' - r'(?P<other_args>.*)\)\s*$') -template_func_arg_re = re.compile(r',\s*(?P<arg>\w+)') +route_re = re.compile(r'%((?P<qual>[\w\d]+):)?(?P<name>\w+)%') +route_esc_re = re.compile(r'\\%((?P<qual>[\w\d]+)\\:)?(?P<name>\w+)\\%') +template_func_re = re.compile(r'^(?P<name>\w+)\((?P<args>.*)\)\s*$') +template_func_arg_re = re.compile(r'((?P<qual>[\w\d]+):)?(?P<arg>\+?\w+)') ugly_url_cleaner = re.compile(r'\.html$') @@ -21,15 +20,13 @@ pass +class InvalidRouteError(Exception): + pass + + def create_route_metadata(page): route_metadata = copy.deepcopy(page.source_metadata) route_metadata.update(page.getRouteMetadata()) - - # TODO: fix this hard-coded shit - for key in ['year', 'month', 'day']: - if key in route_metadata and isinstance(route_metadata[key], str): - route_metadata[key] = int(route_metadata[key]) - return route_metadata @@ -38,53 +35,23 @@ raise NotImplementedError() -SLUGIFY_ENCODE = 1 -SLUGIFY_TRANSLITERATE = 2 -SLUGIFY_LOWERCASE = 4 -SLUGIFY_DOT_TO_DASH = 8 -SLUGIFY_SPACE_TO_DASH = 16 - - -re_first_dot_to_dash = re.compile(r'^\.+') -re_dot_to_dash = re.compile(r'\.+') -re_space_to_dash = re.compile(r'\s+') - - -def _parse_slugify_mode(value): - mapping = { - 'encode': SLUGIFY_ENCODE, - 'transliterate': SLUGIFY_TRANSLITERATE, - 'lowercase': SLUGIFY_LOWERCASE, - 'dot_to_dash': SLUGIFY_DOT_TO_DASH, - 'space_to_dash': SLUGIFY_SPACE_TO_DASH} - mode = 0 - for v in value.split(','): - f = mapping.get(v.strip()) - if f is None: - if v == 'iconv': - raise Exception("'iconv' is not supported as a 
slugify mode " - "in PieCrust2. Use 'transliterate'.") - raise Exception("Unknown slugify flag: %s" % v) - mode |= f - return mode +ROUTE_TYPE_SOURCE = 0 +ROUTE_TYPE_GENERATOR = 1 class Route(object): """ Information about a route for a PieCrust application. Each route defines the "shape" of an URL and how it maps to - sources and taxonomies. + sources and generators. """ def __init__(self, app, cfg): self.app = app - self.source_name = cfg['source'] - self.taxonomy_name = cfg.get('taxonomy') - self.taxonomy_term_sep = cfg.get('term_separator', '/') - - sm = cfg.get('slugify_mode') - if not sm: - sm = app.config.get('site/slugify_mode', 'encode') - self.slugify_mode = _parse_slugify_mode(sm) + self.source_name = cfg.get('source') + self.generator_name = cfg.get('generator') + if not self.source_name and not self.generator_name: + raise InvalidRouteError( + "Both `source` and `generator` are specified.") self.pretty_urls = app.config.get('site/pretty_urls') self.trailing_slash = app.config.get('site/trailing_slash') @@ -102,6 +69,13 @@ re.escape(self.uri_pattern)) + '$' self.uri_re = re.compile(p) + # Get the types of the route parameters. + self.param_types = {} + for m in route_re.finditer(self.uri_pattern): + qual = m.group('qual') + if qual: + self.param_types[str(m.group('name'))] = qual + # If the URI pattern has a 'path'-type component, we'll need to match # the versions for which that component is empty. 
So for instance if # we have `/foo/%path:bar%`, we may need to match `/foo` (note the @@ -127,23 +101,45 @@ self.template_func = None self.template_func_name = None self.template_func_args = [] + self.template_func_vararg = None self._createTemplateFunc(cfg.get('func')) @property - def is_taxonomy_route(self): - return self.taxonomy_name is not None + def route_type(self): + if self.source_name: + return ROUTE_TYPE_SOURCE + elif self.generator_name: + return ROUTE_TYPE_GENERATOR + else: + raise InvalidRouteError() @property + def is_source_route(self): + return self.route_type == ROUTE_TYPE_SOURCE + + @property + def is_generator_route(self): + return self.route_type == ROUTE_TYPE_GENERATOR + + @cached_property def source(self): + if not self.is_source_route: + return InvalidRouteError("This is not a source route.") for src in self.app.sources: if src.name == self.source_name: return src - raise Exception("Can't find source '%s' for route '%'." % ( + raise Exception("Can't find source '%s' for route '%s'." % ( self.source_name, self.uri)) - @property - def source_realm(self): - return self.source.realm + @cached_property + def generator(self): + if not self.is_generator_route: + return InvalidRouteError("This is not a generator route.") + for gen in self.app.generators: + if gen.name == self.generator_name: + return gen + raise Exception("Can't find generator '%s' for route '%s'." 
% ( + self.generator_name, self.uri)) def matchesMetadata(self, route_metadata): return self.required_route_metadata.issubset(route_metadata.keys()) @@ -179,17 +175,18 @@ for k in missing_keys: route_metadata[k] = '' - # TODO: fix this hard-coded shit - for key in ['year', 'month', 'day']: - if key in route_metadata and isinstance(route_metadata[key], str): - try: - route_metadata[key] = int(route_metadata[key]) - except ValueError: - pass + for k in route_metadata: + route_metadata[k] = self._coerceRouteParameter( + k, route_metadata[k]) return route_metadata def getUri(self, route_metadata, *, sub_num=1): + route_metadata = dict(route_metadata) + for k in route_metadata: + route_metadata[k] = self._coerceRouteParameter( + k, route_metadata[k]) + uri = self.uri_format % route_metadata suffix = None if sub_num > 1: @@ -234,54 +231,24 @@ return uri - def getTaxonomyTerms(self, route_metadata): - if not self.is_taxonomy_route: - raise Exception("This route isn't a taxonomy route.") - - tax = self.app.getTaxonomy(self.taxonomy_name) - all_values = route_metadata.get(tax.term_name) - if all_values is None: - raise Exception("'%s' values couldn't be found in route metadata" % - tax.term_name) - - if self.taxonomy_term_sep in all_values: - return tuple(all_values.split(self.taxonomy_term_sep)) - return all_values - - def slugifyTaxonomyTerm(self, term): - if isinstance(term, tuple): - return self.taxonomy_term_sep.join( - map(self._slugifyOne, term)) - return self._slugifyOne(term) - - def _slugifyOne(self, term): - if self.slugify_mode & SLUGIFY_TRANSLITERATE: - term = unidecode.unidecode(term) - if self.slugify_mode & SLUGIFY_LOWERCASE: - term = term.lower() - if self.slugify_mode & SLUGIFY_DOT_TO_DASH: - term = re_first_dot_to_dash.sub('', term) - term = re_dot_to_dash.sub('-', term) - if self.slugify_mode & SLUGIFY_SPACE_TO_DASH: - term = re_space_to_dash.sub('-', term) - return term - def _uriFormatRepl(self, m): + qual = m.group('qual') name = m.group('name') - 
#TODO: fix this hard-coded shit - if name == 'year': - return '%(year)04d' - if name == 'month': - return '%(month)02d' - if name == 'day': - return '%(day)02d' - return '%(' + name + ')s' + if qual == 'int4': + return '%%(%s)04d' % name + elif qual == 'int2': + return '%%(%s)02d' % name + return '%%(%s)s' % name def _uriPatternRepl(self, m): name = m.group('name') - qualifier = m.group('qual') - if qualifier == 'path' or self.taxonomy_name: + qual = m.group('qual') + if qual == 'path': return r'(?P<%s>[^\?]*)' % name + elif qual == 'int4': + return r'(?P<%s>\d{4})' % name + elif qual == 'int2': + return r'(?P<%s>\d{2})' % name return r'(?P<%s>[^/\?]+)' % name def _uriNoPathRepl(self, m): @@ -291,6 +258,22 @@ return '' return r'(?P<%s>[^/\?]+)' % name + def _coerceRouteParameter(self, name, val): + param_type = self.param_types.get(name) + if param_type is None: + return val + if param_type in ['int', 'int2', 'int4']: + try: + return int(val) + except ValueError: + raise Exception( + "Expected route parameter '%s' to be of type " + "'%s', but was: %s" % + (k, param_type, route_metadata[k])) + if param_type == 'path': + return val + raise Exception("Unknown route parameter type: %s" % param_type) + def _createTemplateFunc(self, func_def): if func_def is None: return @@ -302,66 +285,54 @@ (self.uri_pattern, func_def)) self.template_func_name = m.group('name') - self.template_func_args.append(m.group('first_arg')) - arg_list = m.group('other_args') + self.template_func_args = [] + arg_list = m.group('args') if arg_list: - self.template_func_args += template_func_arg_re.findall(arg_list) + self.template_func_args = [] + for m2 in template_func_arg_re.finditer(arg_list): + self.template_func_args.append(m2.group('arg')) + for i in range(len(self.template_func_args) - 1): + if self.template_func_args[i][0] == '+': + raise Exception("Only the last route parameter can be a " + "variable argument (prefixed with `+`)") - if self.taxonomy_name: - # This will be a taxonomy 
route function... this means we can - # have a variable number of parameters, but only one parameter - # definition, which is the value. - if len(self.template_func_args) != 1: - raise Exception("Route '%s' is a taxonomy route and must have " - "only one argument, which is the term value." % - self.uri_pattern) - - def template_func(*args): - if len(args) == 0: - raise Exception( - "Route function '%s' expected at least one " - "argument." % func_def) - - # Term combinations can be passed as an array, or as multiple - # arguments. - values = args - if len(args) == 1 and isinstance(args[0], list): - values = args[0] + if (self.template_func_args and + self.template_func_args[-1][0] == '+'): + self.template_func_vararg = self.template_func_args[-1][1:] - # We need to register this use of a taxonomy term. - if len(values) == 1: - registered_values = str(values[0]) - else: - registered_values = tuple([str(v) for v in values]) - eis = self.app.env.exec_info_stack - cpi = eis.current_page_info.render_ctx.current_pass_info - if cpi: - cpi.used_taxonomy_terms.add( - (self.source_name, self.taxonomy_name, - registered_values)) - - str_values = self.slugifyTaxonomyTerm(registered_values) - term_name = self.template_func_args[0] - metadata = {term_name: str_values} + def template_func(*args): + is_variable = (self.template_func_vararg is not None) + if not is_variable and len(args) != len(self.template_func_args): + raise Exception( + "Route function '%s' expected %d arguments, " + "got %d: %s" % + (func_def, len(self.template_func_args), + len(args), args)) + elif is_variable and len(args) < len(self.template_func_args): + raise Exception( + "Route function '%s' expected at least %d arguments, " + "got %d: %s" % + (func_def, len(self.template_func_args), + len(args), args)) - return self.getUri(metadata) + metadata = {} + non_var_args = list(self.template_func_args) + if is_variable: + del non_var_args[-1] + + for arg_name, arg_val in zip(non_var_args, args): + 
metadata[arg_name] = self._coerceRouteParameter( + arg_name, arg_val) - else: - # Normal route function. - def template_func(*args): - if len(args) != len(self.template_func_args): - raise Exception( - "Route function '%s' expected %d arguments, " - "got %d." % - (func_def, len(self.template_func_args), - len(args))) - metadata = {} - for arg_name, arg_val in zip(self.template_func_args, args): - #TODO: fix this hard-coded shit. - if arg_name in ['year', 'month', 'day']: - arg_val = int(arg_val) - metadata[arg_name] = arg_val - return self.getUri(metadata) + if is_variable: + metadata[self.template_func_vararg] = [] + for i in range(len(non_var_args), len(args)): + metadata[self.template_func_vararg].append(args[i]) + + if self.is_generator_route: + self.generator.onRouteFunctionUsed(self, metadata) + + return self.getUri(metadata) self.template_func = template_func @@ -390,7 +361,7 @@ f_args = args[:-1] for r, f in self._funcs: if r.source_name == args[-1]: - return f(f_args, **kwargs) + return f(*f_args, **kwargs) raise Exception("No such source: %s" % args[-1]) raise Exception("Incorrect number of arguments for route function. "
--- a/piecrust/serving/server.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/serving/server.py Wed Jun 01 22:24:35 2016 -0700 @@ -183,18 +183,17 @@ # We have a page, let's try to render it. render_ctx = PageRenderingContext(qp, page_num=req_page.page_num, - force_render=True) - if qp.route.taxonomy_name is not None: - taxonomy = app.getTaxonomy(qp.route.taxonomy_name) - tax_terms = qp.route.getTaxonomyTerms(qp.route_metadata) - render_ctx.setTaxonomyFilter(tax_terms, needs_slugifier=True) + force_render=True, + is_from_request=True) + if qp.route.is_generator_route: + qp.route.generator.prepareRenderContext(render_ctx) # See if this page is known to use sources. If that's the case, # just don't use cached rendered segments for that page (but still # use them for pages that are included in it). uri = qp.getUri() entry = self._page_record.getEntry(uri, req_page.page_num) - if (qp.route.taxonomy_name is not None or entry is None or + if (qp.route.is_generator_route or entry is None or entry.used_source_names): cache_key = '%s:%s' % (uri, req_page.page_num) app.env.rendered_segments_repository.invalidate(cache_key) @@ -202,18 +201,6 @@ # Render the page. rendered_page = render_page(render_ctx) - # Check if this page is a taxonomy page that actually doesn't match - # anything. - if qp.route.taxonomy_name is not None: - paginator = rendered_page.data.get('pagination') - if (paginator and paginator.is_loaded and - len(paginator.items) == 0): - taxonomy = app.getTaxonomy(qp.route.taxonomy_name) - message = ("This URL matched a route for taxonomy '%s' but " - "no pages have been found to have it. This page " - "won't be generated by a bake." % taxonomy.name) - raise NotFound(message) - # Remember stuff for next time. if entry is None: entry = ServeRecordPageEntry(req_page.req_path, req_page.page_num)
def find_routes(routes, uri, is_sub_page=False):
    """ Returns routes matching the given URL, but puts generator routes
        at the end so that source routes get first pick.
    """
    source_matches = []
    generator_matches = []
    for candidate in routes:
        metadata = candidate.matchUri(uri)
        if metadata is None:
            continue
        entry = (candidate, metadata, is_sub_page)
        if candidate.is_source_route:
            source_matches.append(entry)
        else:
            generator_matches.append(entry)
    return source_matches + generator_matches
+ req_path_no_num, page_num = split_sub_uri(app, req_path) + if page_num > 1: + routes += find_routes(app.routes, req_path_no_num, True) + if len(routes) == 0: raise RouteNotFoundError("Can't find route for: %s" % req_path) - qp = None - not_found_errors = [] - for route, route_metadata in routes: + req_page = RequestedPage() + for route, route_metadata, is_sub_page in routes: try: + cur_req_path = req_path + if is_sub_page: + cur_req_path = req_path_no_num + qp = _get_requested_page_for_route( - app, route, route_metadata, req_path) + app, route, route_metadata, cur_req_path) if qp is not None: + req_page.qualified_page = qp + req_page.req_path = cur_req_path + if is_sub_page: + req_page.page_num = page_num break except PageNotFoundError as nfe: - not_found_errors.append(nfe) - - req_page = RequestedPage(qp) - req_page.req_path = req_path - req_page.page_num = page_num - req_page.not_found_errors = not_found_errors + req_page.not_found_errors.append(nfe) return req_page def _get_requested_page_for_route(app, route, route_metadata, req_path): - taxonomy = None - source = app.getSource(route.source_name) - if route.taxonomy_name is None: + if not route.is_generator_route: + source = app.getSource(route.source_name) factory = source.findPageFactory(route_metadata, MODE_PARSING) if factory is None: - raise PageNotFoundError("No path found for '%s' in source '%s'." % - (req_path, source.name)) + raise PageNotFoundError( + "No path found for '%s' in source '%s'." % + (req_path, source.name)) else: - taxonomy = app.getTaxonomy(route.taxonomy_name) - - # This will raise `PageNotFoundError` naturally if not found. - tax_page_ref = taxonomy.getPageRef(source) - factory = tax_page_ref.getFactory() + factory = route.generator.getPageFactory(route_metadata) + if factory is None: + raise PageNotFoundError( + "No path found for '%s' in generator '%s'." % + (req_path, route.generator.name)) # Build the page. page = factory.buildPage()
--- a/piecrust/sources/array.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/sources/array.py Wed Jun 01 22:24:35 2016 -0700 @@ -42,6 +42,3 @@ for p in self.inner_source: yield CachedPageFactory(p) - def getTaxonomyPageRef(self, tax_name): - return None -
--- a/piecrust/sources/base.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/sources/base.py Wed Jun 01 22:24:35 2016 -0700 @@ -133,9 +133,3 @@ return self._provider_type(self, page, override) - def getTaxonomyPageRef(self, tax_name): - tax_pages = self.config.get('taxonomy_pages') - if tax_pages is None: - return None - return tax_pages.get(tax_name) -
--- a/piecrust/sources/mixins.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/sources/mixins.py Wed Jun 01 22:24:35 2016 -0700 @@ -23,33 +23,31 @@ return self.source.getPages() -class SourceFactoryWithoutTaxonomiesIterator(object): +class SourceFactoryWithoutGeneratorsIterator(object): def __init__(self, source): self.source = source - self._taxonomy_pages = None + self._generator_pages = None # See comment above. self.it = None def __iter__(self): - self._cacheTaxonomyPages() + self._cacheGeneratorPages() for p in self.source.getPages(): - if p.rel_path in self._taxonomy_pages: + if p.rel_path in self._generator_pages: continue yield p - def _cacheTaxonomyPages(self): - if self._taxonomy_pages is not None: + def _cacheGeneratorPages(self): + if self._generator_pages is not None: return app = self.source.app - self._taxonomy_pages = set() + self._generator_pages = set() for src in app.sources: - for tax in app.taxonomies: - ref_spec = src.getTaxonomyPageRef(tax.name) - page_ref = PageRef(app, ref_spec) - for sn, rp in page_ref.possible_split_ref_specs: + for gen in app.generators: + for sn, rp in gen.page_ref.possible_split_ref_specs: if sn == self.source.name: - self._taxonomy_pages.add(rp) + self._generator_pages.add(rp) class DateSortIterator(object): @@ -82,9 +80,9 @@ return self.config['items_per_page'] def getSourceIterator(self): - if self.config.get('iteration_includes_taxonomies', False): + if self.config.get('iteration_includes_generator_pages', False): return SourceFactoryIterator(self) - return SourceFactoryWithoutTaxonomiesIterator(self) + return SourceFactoryWithoutGeneratorsIterator(self) def getSorterIterator(self, it): return DateSortIterator(it)
--- a/piecrust/sources/pageref.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/sources/pageref.py Wed Jun 01 22:24:35 2016 -0700 @@ -32,6 +32,9 @@ self._first_valid_hit_index = self._INDEX_NEEDS_LOADING self._exts = list(app.config.get('site/auto_formats').keys()) + def __str__(self): + return self._page_ref + @property def exists(self): try:
--- a/piecrust/taxonomies.py Sat May 14 18:18:54 2016 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,34 +0,0 @@ -from piecrust.sources.pageref import PageRef, PageNotFoundError - - -class Taxonomy(object): - def __init__(self, app, name, config): - self.app = app - self.name = name - self.term_name = config.get('term', name) - self.is_multiple = config.get('multiple', False) - self.page_ref = config.get('page') - self._source_page_refs = {} - - @property - def setting_name(self): - if self.is_multiple: - return self.name - return self.term_name - - def resolvePagePath(self, source): - pr = self.getPageRef(source) - try: - return pr.path - except PageNotFoundError: - return None - - def getPageRef(self, source): - if source.name in self._source_page_refs: - return self._source_page_refs[source.name] - - ref_path = source.getTaxonomyPageRef(self.name) - page_ref = PageRef(self.app, ref_path) - self._source_page_refs[source.name] = page_ref - return page_ref -
--- a/piecrust/templating/jinjaengine.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/templating/jinjaengine.py Wed Jun 01 22:24:35 2016 -0700 @@ -21,7 +21,7 @@ from piecrust.routing import CompositeRouteFunction from piecrust.templating.base import (TemplateEngine, TemplateNotFoundError, TemplatingError) -from piecrust.uriutil import multi_replace, split_sub_uri +from piecrust.uriutil import multi_replace logger = logging.getLogger(__name__) @@ -58,6 +58,8 @@ except AbortedSourceUseError: raise except Exception as ex: + if self.app.debug: + raise msg = "Error rendering Jinja markup" rel_path = os.path.relpath(path, self.app.root_dir) raise TemplatingError(msg, rel_path) from ex @@ -253,7 +255,6 @@ if cpi is None or cpi.page is None or cpi.render_ctx is None: raise Exception("Can't paginate when no page has been pushed " "on the execution stack.") - first_uri, _ = split_sub_uri(self.app, cpi.render_ctx.uri) return Paginator(cpi.page, value, page_num=cpi.render_ctx.page_num, items_per_page=items_per_page)
class ThemeLoader(object):
    """Finds the directory of the theme configured for a website, if any."""

    def __init__(self, root_dir):
        self.root_dir = root_dir

    def getThemeDir(self):
        """Return the configured theme's directory, or `None`.

        Returns `None` when the site config has no `site/theme` entry.
        Raises `ThemeNotFoundError` when a theme is configured but can't
        be found in any of the theme directories.
        """
        # Pre-load the config quickly to see if we're loading a specific
        # theme from somewhere.
        # TODO: make configs and themes load together to speed this up.
        config_path = os.path.join(self.root_dir, CONFIG_PATH)
        with open(config_path, 'r', encoding='utf8') as fp:
            # Use `safe_load`: the site config is plain data, and
            # `yaml.load` without an explicit loader can execute
            # arbitrary Python from the config file.
            config = yaml.safe_load(fp.read())
        site_config = config.get('site', {})
        theme = site_config.get('theme', None)
        if theme is None:
            return None

        # Get the list of directories in which themes are installed.
        dirs = []
        themes_dirs = site_config.get('themes_dirs', [])
        if isinstance(themes_dirs, str):
            dirs.append(os.path.join(self.root_dir, themes_dirs))
        else:
            dirs += [os.path.join(self.root_dir, p) for p in themes_dirs]

        # Add the default `themes` directory.
        default_themes_dir = os.path.join(self.root_dir, THEMES_DIR)
        if os.path.isdir(default_themes_dir):
            dirs.append(default_themes_dir)

        # Try to find the theme the user wants.
        for d in dirs:
            theme_dir = os.path.join(d, theme)
            if os.path.isdir(theme_dir):
                return theme_dir

        # Bug fix: the original passed `(theme, ...)` as a second
        # constructor argument instead of %-formatting the message, so the
        # error text was never interpolated.
        raise ThemeNotFoundError(
            "Can't find theme '%s'. Looked in: %s" %
            (theme, ', '.join(dirs)))
--- a/piecrust/uriutil.py Sat May 14 18:18:54 2016 -0700 +++ b/piecrust/uriutil.py Wed Jun 01 22:24:35 2016 -0700 @@ -2,72 +2,11 @@ import os.path import string import logging -import functools logger = logging.getLogger(__name__) -class UriError(Exception): - def __init__(self, uri): - super(UriError, self).__init__("Invalid URI: %s" % uri) - - -@functools.total_ordering -class UriInfo(object): - def __init__(self, uri, source, args, taxonomy=None, page_num=1): - self.uri = uri - self.source = source - self.args = args - self.taxonomy = taxonomy - self.page_num = page_num - - def __eq__(self, other): - return ((self.uri, self.source, self.args, self.taxonomy, - self.page_num) == - (other.uri, other.source, other.args, other.taxonomy, - other.page_num)) - - def __lt__(self, other): - return ((self.uri, self.source, self.args, self.taxonomy, - self.page_num) < - (other.uri, other.source, other.args, other.taxonomy, - other.page_num)) - - -pagenum_pattern = re.compile(r'/(\d+)/?$') - - -def parse_uri(routes, uri): - if uri.find('..') >= 0: - raise UriError(uri) - - page_num = 1 - match = pagenum_pattern.search(uri) - if match is not None: - uri = uri[:match.start()] - page_num = int(match.group(1)) - - uri = '/' + uri.strip('/') - - for rn, rc in routes.items(): - pattern = route_to_pattern(rn) - m = re.match(pattern, uri) - if m is not None: - args = m.groupdict() - return UriInfo(uri, rc['source'], args, rc.get('taxonomy'), - page_num) - - return None - - -r2p_pattern = re.compile(r'%(\w+)%') - - -def route_to_pattern(route): - return r2p_pattern.sub(r'(?P<\1>[\w\-]+)', route) - - def multi_replace(text, replacements): reps = dict((re.escape(k), v) for k, v in replacements.items()) pattern = re.compile("|".join(list(reps.keys())))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/bakes/test_archives.yaml Wed Jun 01 22:24:35 2016 -0700 @@ -0,0 +1,43 @@ +--- +in: + pages/_year.html: | + Posts in {{year}} + {% for post in pagination.posts -%} + {{post.url}} + {% endfor %} + posts/2015-12-01_post0.html: '' + posts/2016-01-01_post1.html: '' + posts/2016-01-02_post2.html: '' + posts/2016-01-03_post3.html: '' + posts/2016-01-04_post4.html: '' + posts/2016-01-05_post5.html: '' + posts/2016-01-06_post6.html: '' + posts/2016-01-07_post7.html: '' +outfiles: + archives/2016.html: | + Posts in 2016 + /2016/01/07/post7.html + /2016/01/06/post6.html + /2016/01/05/post5.html + /2016/01/04/post4.html + /2016/01/03/post3.html + archives/2016/2.html: | + Posts in 2016 + /2016/01/02/post2.html + /2016/01/01/post1.html +--- +in: + pages/_year.html: | + Posts in {{year}} + {% for post in archives -%} + {{post.url}} + {% endfor %} + posts/2015-12-01_post0.html: '' + posts/2016-01-01_post1.html: '' + posts/2016-01-02_post2.html: '' +outfiles: + archives/2016.html: | + Posts in 2016 + /2016/01/01/post1.html + /2016/01/02/post2.html +
--- a/tests/bakes/test_data_provider.yaml Sat May 14 18:18:54 2016 -0700 +++ b/tests/bakes/test_data_provider.yaml Wed Jun 01 22:24:35 2016 -0700 @@ -14,3 +14,60 @@ /bar.html /foo.html / +--- +in: + posts/2016-06-01_one.md: "One!" + posts/2016-06-02_two.md: "Two!" + posts/2016-06-03_three.md: "Three!" + pages/_index.md: | + {% for p in blog.posts -%} + {{p.url}} + {% endfor %} +outfiles: + index.html: | + /2016/06/03/three.html + /2016/06/02/two.html + /2016/06/01/one.html +--- +config: + blog: + subtitle: "Forcing a Merged Mapping" +in: + posts/2016-06-01_one.md: "One!" + posts/2016-06-02_two.md: "Two!" + posts/2016-06-03_three.md: "Three!" + pages/_index.md: | + {{blog.subtitle}} + {% for p in blog.posts -%} + {{p.url}} + {% endfor %} +outfiles: + index.html: | + Forcing a Merged Mapping + /2016/06/03/three.html + /2016/06/02/two.html + /2016/06/01/one.html +--- +config: + site: + blogs: [aaa, xyz] +in: + posts/aaa/2016-06-01_one.md: "One!" + posts/aaa/2016-06-02_two.md: "Two!" + posts/xyz/2016-06-01_one-other.md: "One Other!" + posts/xyz/2016-06-02_two-other.md: "Two Other!" + pages/_index.md: | + {% for p in aaa.posts -%} + {{p.url}} + {% endfor %} + {% for p in xyz.posts -%} + {{p.url}} + {% endfor %} +outfiles: + index.html: | + /aaa/2016/06/02/two.html + /aaa/2016/06/01/one.html + + /xyz/2016/06/02/two-other.html + /xyz/2016/06/01/one-other.html +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/bakes/test_multiblog.yaml Wed Jun 01 22:24:35 2016 -0700 @@ -0,0 +1,28 @@ +--- +config: + site: + blogs: [one, two] +in: + posts/one/2016-01-01_post1.html: '' + posts/two/2016-01-02_post2.html: '' + pages/foo-one.html: "Link: {{pconeposturl(2016, 01, 01, 'post1')}}" + pages/foo-two.html: "Link: {{pctwoposturl(2016, 01, 02, 'post2')}}" +outfiles: + foo-one.html: "Link: /one/2016/01/01/post1.html" + foo-two.html: "Link: /two/2016/01/02/post2.html" +--- +config: + site: + blogs: [one, two] + one: + func_prefix: pc + two: + func_prefix: pc +in: + posts/one/2016-01-01_post1.html: '' + posts/two/2016-01-02_post2.html: '' + pages/foo-one.html: "---\nblog: one\n---\nLink: {{pcposturl(2016, 01, 01, 'post1', 'one')}}" + pages/foo-two.html: "---\nblog: two\n---\nLink: {{pcposturl(2016, 01, 02, 'post2', 'two')}}" +outfiles: + foo-one.html: "Link: /one/2016/01/01/post1.html" + foo-two.html: "Link: /two/2016/01/02/post2.html"
--- a/tests/bakes/test_simple_tags.yaml Sat May 14 18:18:54 2016 -0700 +++ b/tests/bakes/test_simple_tags.yaml Wed Jun 01 22:24:35 2016 -0700 @@ -55,4 +55,38 @@ whatever.html: | Pages in whatever Post 02 +--- +in: + posts/2016-06-01_post01.md: | + --- + title: Post 01 + tags: [foo, bar] + --- + posts/2016-06-02_post02.md: | + --- + title: Post 02 + tags: [bar, foo] + --- + pages/_tag.md: | + Pages in {{tags|join(', ')}} + {% for p in pagination.posts -%} + {{p.title}} + {% endfor %} + pages/blah.md: | + Link to: {{pctagurl('foo', 'bar')}} +outfiles: + blah.html: | + Link to: /tag/foo/bar.html + tag/foo.html: | + Pages in foo + Post 02 + Post 01 + tag/bar.html: | + Pages in bar + Post 02 + Post 01 + tag/foo/bar.html: | + Pages in foo, bar + Post 02 + Post 01
--- a/tests/bakes/test_unicode_tags.yaml Sat May 14 18:18:54 2016 -0700 +++ b/tests/bakes/test_unicode_tags.yaml Wed Jun 01 22:24:35 2016 -0700 @@ -11,18 +11,18 @@ tags: [étrange, sévère] --- pages/_tag.md: | - Pages in {{pctagurl(tag)}} + Pages in {{pctagurl(tag)}} with {{tag}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} pages/_index.md: '' outfiles: tag/étrange.html: | - Pages in /tag/%C3%A9trange.html + Pages in /tag/%C3%A9trange.html with étrange Post 02 Post 01 tag/sévère.html: | - Pages in /tag/s%C3%A9v%C3%A8re.html + Pages in /tag/s%C3%A9v%C3%A8re.html with sévère Post 02 --- in:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/servings/test_archives.yaml Wed Jun 01 22:24:35 2016 -0700 @@ -0,0 +1,16 @@ +--- +url: /archives/2016.html +in: + pages/_year.html: | + Posts in {{year}} + {% for post in pagination.posts -%} + {{post.url}} + {% endfor %} + posts/2015-12-01_post0.html: '' + posts/2016-01-01_post1.html: '' + posts/2016-01-02_post2.html: '' +out: | + Posts in 2016 + /2016/01/02/post2.html + /2016/01/01/post1.html +
--- a/tests/test_appconfig.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_appconfig.py Wed Jun 01 22:24:35 2016 -0700 @@ -49,12 +49,11 @@ with mock_fs_scope(fs): app = fs.getApp() # The order of routes is important. Sources, not so much. - # `posts` shows up 3 times in routes (posts, tags, categories) assert (list( map( - lambda v: v['source'], + lambda v: v.get('generator') or v['source'], app.config.get('site/routes'))) == - ['notes', 'posts', 'posts', 'posts', 'pages', 'theme_pages']) + ['notes', 'posts', 'posts_archives', 'posts_tags', 'posts_categories', 'pages', 'theme_pages']) assert list(app.config.get('site/sources').keys()) == [ 'theme_pages', 'pages', 'posts', 'notes'] @@ -77,9 +76,9 @@ # `posts` shows up 3 times in routes (posts, tags, categories) assert (list( map( - lambda v: v['source'], + lambda v: v.get('generator') or v['source'], app.config.get('site/routes'))) == - ['notes', 'posts', 'posts', 'posts', 'pages', 'theme_notes', 'theme_pages']) + ['notes', 'posts', 'posts_archives', 'posts_tags', 'posts_categories', 'pages', 'theme_notes', 'theme_pages']) assert list(app.config.get('site/sources').keys()) == [ 'theme_pages', 'theme_notes', 'pages', 'posts', 'notes']
--- a/tests/test_data_iterators.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_data_iterators.py Wed Jun 01 22:24:35 2016 -0700 @@ -72,7 +72,8 @@ page = mock.MagicMock(spec=Page) page.config = PageConfiguration() page.config.set('threes', {'is_foo': 3}) - it = PageIterator([TestItem(v) for v in [3, 2, 3, 1, 4, 3]], page) + it = PageIterator([TestItem(v) for v in [3, 2, 3, 1, 4, 3]], + current_page=page) it.filter('threes') assert it.total_count == 3 assert len(it) == 3
--- a/tests/test_data_provider.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_data_provider.py Wed Jun 01 22:24:35 2016 -0700 @@ -6,21 +6,21 @@ fs = (mock_fs() .withConfig() .withPage('posts/2015-03-01_one.md', - {'title': 'One', 'category': 'Foo'}) + {'title': 'One', 'tags': ['Foo']}) .withPage('posts/2015-03-02_two.md', - {'title': 'Two', 'category': 'Foo'}) + {'title': 'Two', 'tags': ['Foo']}) .withPage('posts/2015-03-03_three.md', - {'title': 'Three', 'category': 'Bar'}) - .withPage('pages/categories.md', + {'title': 'Three', 'tags': ['Bar']}) + .withPage('pages/tags.md', {'format': 'none', 'layout': 'none'}, - "{%for c in blog.categories%}\n" + "{%for c in blog.tags%}\n" "{{c.name}} ({{c.post_count}})\n" "{%endfor%}\n")) with mock_fs_scope(fs): app = fs.getApp() - page = app.getSource('pages').getPage({'slug': 'categories'}) - route = app.getRoute('pages', None) - route_metadata = {'slug': 'categories'} + page = app.getSource('pages').getPage({'slug': 'tags'}) + route = app.getSourceRoute('pages', None) + route_metadata = {'slug': 'tags'} qp = QualifiedPage(page, route, route_metadata) ctx = PageRenderingContext(qp) rp = render_page(ctx)
--- a/tests/test_fastpickle.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_fastpickle.py Wed Jun 01 22:24:35 2016 -0700 @@ -1,6 +1,6 @@ import datetime import pytest -from piecrust.fastpickle import pickle, unpickle +from piecrust.fastpickle import pickle, unpickle, pickle_obj, unpickle_obj class Foo(object): @@ -51,3 +51,14 @@ for i in range(2): assert f.bars[i].value == o.bars[i].value + +def test_reentrance(): + a = {'test_ints': 42, 'test_set': set([1, 2])} + data = pickle_obj(a) + b = unpickle_obj(data) + assert a == b + other_b = unpickle_obj(data) + assert a == other_b + c = unpickle_obj(data) + assert a == c +
--- a/tests/test_serving.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_serving.py Wed Jun 01 22:24:35 2016 -0700 @@ -33,7 +33,7 @@ assert len(matching) == len(expected) for i in range(len(matching)): - route, metadata = matching[i] + route, metadata, is_sub_page = matching[i] exp_source, exp_md = expected[i] assert route.source_name == exp_source assert metadata == exp_md @@ -76,12 +76,13 @@ with mock_fs_scope(fs): app = fs.getApp() page = app.getSource('pages').getPage({'slug': '_tag', 'tag': tag}) - route = app.getTaxonomyRoute('tags', 'posts') + route = app.getGeneratorRoute('posts_tags') + assert route is not None + route_metadata = {'slug': '_tag', 'tag': tag} - qp = QualifiedPage(page, route, route_metadata) ctx = PageRenderingContext(qp) - ctx.setTaxonomyFilter(tag) + route.generator.prepareRenderContext(ctx) rp = render_page(ctx) expected = "Pages in %s\n" % tag @@ -109,7 +110,13 @@ return c fs = (mock_fs() - .withConfig() + .withConfig({ + 'site': { + 'taxonomies': { + 'categories': {'term': 'category'} + } + } + }) .withPages(6, 'posts/2015-03-{idx1:02}_post{idx1:02}.md', config_factory) .withPage('pages/_category.md', {'layout': 'none', 'format': 'none'}, @@ -121,12 +128,13 @@ app = fs.getApp() page = app.getSource('pages').getPage({'slug': '_category', 'category': category}) - route = app.getTaxonomyRoute('categories', 'posts') + route = app.getGeneratorRoute('posts_categories') + assert route is not None + route_metadata = {'slug': '_category', 'category': category} - qp = QualifiedPage(page, route, route_metadata) ctx = PageRenderingContext(qp) - ctx.setTaxonomyFilter(category) + route.generator.prepareRenderContext(ctx) rp = render_page(ctx) expected = "Pages in %s\n" % category
--- a/tests/test_templating_jinjaengine.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_templating_jinjaengine.py Wed Jun 01 22:24:35 2016 -0700 @@ -28,7 +28,7 @@ with mock_fs_scope(fs, open_patches=open_patches): app = fs.getApp() page = get_simple_page(app, 'foo.md') - route = app.getRoute('pages', None) + route = app.getSourceRoute('pages', None) route_metadata = {'slug': 'foo'} output = render_simple_page(page, route, route_metadata) assert output == expected @@ -46,7 +46,7 @@ with mock_fs_scope(fs, open_patches=open_patches): app = fs.getApp() page = get_simple_page(app, 'foo.md') - route = app.getRoute('pages', None) + route = app.getSourceRoute('pages', None) route_metadata = {'slug': 'foo'} output = render_simple_page(page, route, route_metadata) assert output == expected @@ -63,7 +63,7 @@ with mock_fs_scope(fs, open_patches=open_patches): app = fs.getApp() page = get_simple_page(app, 'foo.md') - route = app.getRoute('pages', None) + route = app.getSourceRoute('pages', None) route_metadata = {'slug': 'foo'} output = render_simple_page(page, route, route_metadata) assert output == expected
--- a/tests/test_templating_pystacheengine.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_templating_pystacheengine.py Wed Jun 01 22:24:35 2016 -0700 @@ -28,7 +28,7 @@ with mock_fs_scope(fs, open_patches=open_patches): app = fs.getApp() page = get_simple_page(app, 'foo.md') - route = app.getRoute('pages', None) + route = app.getSourceRoute('pages', None) route_metadata = {'slug': 'foo'} output = render_simple_page(page, route, route_metadata) assert output == expected @@ -46,7 +46,7 @@ with mock_fs_scope(fs, open_patches=open_patches): app = fs.getApp() page = get_simple_page(app, 'foo.md') - route = app.getRoute('pages', None) + route = app.getSourceRoute('pages', None) route_metadata = {'slug': 'foo'} output = render_simple_page(page, route, route_metadata) # On Windows, pystache unexplicably adds `\r` to some newlines... wtf. @@ -65,7 +65,7 @@ with mock_fs_scope(fs, open_patches=open_patches): app = fs.getApp() page = get_simple_page(app, 'foo.md') - route = app.getRoute('pages', None) + route = app.getSourceRoute('pages', None) route_metadata = {'slug': 'foo'} output = render_simple_page(page, route, route_metadata) # On Windows, pystache unexplicably adds `\r` to some newlines... wtf.
--- a/tests/test_uriutil.py Sat May 14 18:18:54 2016 -0700 +++ b/tests/test_uriutil.py Wed Jun 01 22:24:35 2016 -0700 @@ -1,29 +1,6 @@ import mock import pytest -from piecrust.uriutil import UriInfo, parse_uri, split_sub_uri - - -@pytest.mark.parametrize('routes, uri, expected', [ - ({}, '/foo', None), - ( - {'/articles/%slug%': {'source': 'dummy'}}, - '/articles/foo', - UriInfo('', 'dummy', {'slug': 'foo'})), - ( - {'/foo/%bar%': {'source': 'foo'}, - '/other/%one%-%two%': {'source': 'other'}}, - '/other/some-thing', - UriInfo('', 'other', {'one': 'some', 'two': 'thing'})) - ]) -def test_parse_uri(routes, uri, expected): - if expected is not None: - expected.uri = uri - for pattern, args in routes.items(): - if 'taxonomy' not in args: - args['taxonomy'] = None - - actual = parse_uri(routes, uri) - assert actual == expected +from piecrust.uriutil import split_sub_uri @pytest.mark.parametrize('uri, expected, pretty_urls', [