# HG changeset patch # User Ludovic Chabant # Date 1511330832 28800 # Node ID 2e5c5d33d62c7176b9485500c696270519aa106b # Parent 4f2e0136123d04cd80111b7d1cdc64d2ddfe1dc5# Parent 7a1903ede496ba082510087ecb41ccc3dcc6fdc1 Merge changes. diff -r 7a1903ede496 -r 2e5c5d33d62c dev-requirements.txt --- a/dev-requirements.txt Tue Nov 21 11:00:06 2017 -0800 +++ b/dev-requirements.txt Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,6 @@ cov-core==1.15.0 coverage==4.0.3 -invoke==0.12.2 +invoke==0.21.0 mock==1.0.1 pytest==2.8.7 pytest-cov==2.2.1 diff -r 7a1903ede496 -r 2e5c5d33d62c docs/raw/apple-touch-icons.psd Binary file docs/raw/apple-touch-icons.psd has changed diff -r 7a1903ede496 -r 2e5c5d33d62c garcon/benchsite.py --- a/garcon/benchsite.py Tue Nov 21 11:00:06 2017 -0800 +++ b/garcon/benchsite.py Tue Nov 21 22:07:12 2017 -0800 @@ -73,6 +73,11 @@ if not os.path.isdir(posts_dir): os.makedirs(posts_dir) + config_path = os.path.join(self.out_dir, 'config.yml') + if not os.path.exists(config_path): + with open(config_path, 'w') as fp: + fp.write('\n') + def writePost(self, post_info): out_dir = os.path.join(self.out_dir, 'posts') slug = post_info['slug'] @@ -220,7 +225,7 @@ from invoke import task @task - def genbenchsite(engine, out_dir, post_count=100, tag_count=10): + def genbenchsite(ctx, engine, out_dir, post_count=100, tag_count=10): generate(engine, out_dir, post_count=post_count, tag_count=tag_count) diff -r 7a1903ede496 -r 2e5c5d33d62c garcon/changelog.py --- a/garcon/changelog.py Tue Nov 21 11:00:06 2017 -0800 +++ b/garcon/changelog.py Tue Nov 21 22:07:12 2017 -0800 @@ -196,6 +196,6 @@ from invoke import task @task - def genchangelog(out_file='CHANGELOG.rst', last=None): + def genchangelog(ctx, out_file='CHANGELOG.rst', last=None): generate(out_file, last) diff -r 7a1903ede496 -r 2e5c5d33d62c garcon/documentation.py --- a/garcon/documentation.py Tue Nov 21 11:00:06 2017 -0800 +++ b/garcon/documentation.py Tue Nov 21 22:07:12 2017 -0800 @@ -16,7 +16,7 @@ 'venv_dir': "The 
directory of the virtual environment to use to run " "PieCrust." }) -def gendocs(tmp_dir=None, out_dir=None, root_url=None, venv_dir=None): +def gendocs(ctx, tmp_dir=None, out_dir=None, root_url=None, venv_dir=None): base_dir = os.path.abspath( os.path.join(os.path.dirname(__file__), '..')) os.chdir(base_dir) diff -r 7a1903ede496 -r 2e5c5d33d62c garcon/messages.py --- a/garcon/messages.py Tue Nov 21 11:00:06 2017 -0800 +++ b/garcon/messages.py Tue Nov 21 22:07:12 2017 -0800 @@ -3,7 +3,7 @@ @task -def genmessages(): +def genmessages(ctx): root_dir = 'garcon/messages' out_dir = 'piecrust/resources/messages' run('python chef.py --root %s bake -o %s' % (root_dir, out_dir)) diff -r 7a1903ede496 -r 2e5c5d33d62c garcon/pypi.py --- a/garcon/pypi.py Tue Nov 21 11:00:06 2017 -0800 +++ b/garcon/pypi.py Tue Nov 21 22:07:12 2017 -0800 @@ -3,7 +3,7 @@ @task -def makerelease(version, local_only=False): +def makerelease(ctx, version, local_only=False): if not version: raise Exception("You must specify a version!") diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/__init__.py --- a/piecrust/__init__.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/__init__.py Tue Nov 21 22:07:12 2017 -0800 @@ -19,13 +19,13 @@ PIECRUST_URL = 'https://bolt80.com/piecrust/' -CACHE_VERSION = 29 +CACHE_VERSION = 31 try: from piecrust.__version__ import APP_VERSION except ImportError: APP_VERSION = 'unknown' -import os.path +import os.path # NOQA RESOURCES_DIR = os.path.join(os.path.dirname(__file__), 'resources') diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/assets/img/apple-touch-icon.png Binary file piecrust/admin/assets/img/apple-touch-icon.png has changed diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/assets/js/foodtruck.js --- a/piecrust/admin/assets/js/foodtruck.js Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/assets/js/foodtruck.js Tue Nov 21 22:07:12 2017 -0800 @@ -26,6 +26,16 @@ closePublogBtn.on('click', function() { publogEl.fadeOut(200); }); + + if (!!window.EventSource) { + // 
TODO: this only works when the Foodtruck blueprint is added under `/pc-admin`. + var source = new EventSource('/pc-admin/publish-log'); + source.onerror = function(e) { + console.log("Error with SSE, closing.", e); + source.close(); + }; + source.addEventListener('message', onPublishEvent); + } }); var onPublishEvent = function(e) { @@ -55,13 +65,3 @@ containerEl.append(msgEl); }; -if (!!window.EventSource) { - var source = new EventSource('/publish-log'); - source.onerror = function(e) { - console.log("Error with SSE, closing.", e); - source.close(); - }; - source.addEventListener('message', onPublishEvent); -} - - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/blueprint.py --- a/piecrust/admin/blueprint.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/blueprint.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,9 +1,7 @@ import time import logging -from flask import Blueprint, current_app, g, request -from .configuration import ( - FoodTruckConfigNotFoundError, get_foodtruck_config) -from .sites import FoodTruckSites, InvalidSiteError +from flask import Blueprint, current_app, g +from .siteinfo import SiteInfo logger = logging.getLogger(__name__) @@ -20,22 +18,24 @@ def load_user(user_id): - admin_id = g.config.get('security/username') + admin_id = current_app.config.get('USERNAME') if admin_id == user_id: - admin_pwd = g.config.get('security/password') + admin_pwd = current_app.config.get('PASSWORD') return User(admin_id, admin_pwd) return None -login_manager = LoginManager() -login_manager.login_view = 'FoodTruck.login' -login_manager.user_loader(load_user) - +def record_login_manager(state): + login_manager = LoginManager() + login_manager.login_view = 'FoodTruck.login' + login_manager.user_loader(load_user) -def record_login_manager(state): - if state.app.secret_key == 'temp-key': + if state.app.config['SECRET_KEY'] == 'temp-key': def _handler(): - raise FoodTruckConfigNotFoundError() + from flask import render_template + return render_template( + 
'error.html', + error="No secret key has been set!") logger.debug("No secret key found, disabling website login.") login_manager.unauthorized_handler(_handler) @@ -92,31 +92,13 @@ @foodtruck_bp.before_request def _setup_foodtruck_globals(): - def _get_config(): - admin_root = current_app.config['FOODTRUCK_ROOT'] - procedural_config = current_app.config['FOODTRUCK_PROCEDURAL_CONFIG'] - return get_foodtruck_config(admin_root, procedural_config) - - def _get_sites(): - names = g.config.get('sites') - if not names or not isinstance(names, dict): - raise InvalidSiteError( - "No sites are defined in the configuration file.") + def _get_site(): + root_dir = current_app.config['FOODTRUCK_ROOT_DIR'] + return SiteInfo(root_dir, + url_prefix=foodtruck_bp.url_prefix, + debug=current_app.debug) - current = request.cookies.get('foodtruck_site_name') - if current is not None and current not in names: - current = None - if current is None: - current = next(iter(names.keys())) - s = FoodTruckSites(g.config, current) - return s - - def _get_current_site(): - return g.sites.get() - - g.config = LazySomething(_get_config) - g.sites = LazySomething(_get_sites) - g.site = LazySomething(_get_current_site) + g.site = LazySomething(_get_site) @foodtruck_bp.after_request @@ -148,8 +130,7 @@ import piecrust.admin.views.dashboard # NOQA import piecrust.admin.views.edit # NOQA import piecrust.admin.views.menu # NOQA +import piecrust.admin.views.micropub # NOQA import piecrust.admin.views.preview # NOQA import piecrust.admin.views.publish # NOQA import piecrust.admin.views.sources # NOQA - - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/configuration.py --- a/piecrust/admin/configuration.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,74 +0,0 @@ -import os.path -import copy -import logging -import yaml -from piecrust.configuration import ( - Configuration, ConfigurationError, ConfigurationLoader, - merge_dicts) - - -logger = logging.getLogger(__name__) 
- - -def get_foodtruck_config(dirname=None, fallback_config=None): - dirname = dirname or os.getcwd() - cfg_path = os.path.join(dirname, 'foodtruck.yml') - return FoodTruckConfiguration(cfg_path, fallback_config) - - -class FoodTruckConfigNotFoundError(Exception): - pass - - -class FoodTruckConfiguration(Configuration): - def __init__(self, cfg_path, fallback_config=None): - super(FoodTruckConfiguration, self).__init__() - self.cfg_path = cfg_path - self.fallback_config = fallback_config - - def _load(self): - try: - with open(self.cfg_path, 'r', encoding='utf-8') as fp: - values = yaml.load( - fp.read(), - Loader=ConfigurationLoader) - - self._values = self._validateAll(values) - except OSError: - if self.fallback_config is None: - raise FoodTruckConfigNotFoundError() - - logger.debug("No FoodTruck configuration found, using fallback " - "configuration.") - self._values = copy.deepcopy(self.fallback_config) - except Exception as ex: - raise ConfigurationError( - "Error loading configuration from: %s" % - self.cfg_path) from ex - - def _validateAll(self, values): - if values is None: - values = {} - - values = merge_dicts(copy.deepcopy(default_configuration), values) - - return values - - def save(self): - with open(self.cfg_path, 'w', encoding='utf8') as fp: - self.cfg.write(fp) - - -default_configuration = { - 'triggers': { - 'bake': 'chef bake' - }, - 'scm': { - 'type': 'hg' - }, - 'security': { - 'username': '', - 'password': '' - } -} - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/main.py --- a/piecrust/admin/main.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,26 +0,0 @@ -import logging - - -logger = logging.getLogger(__name__) - - -def run_foodtruck(host=None, port=None, debug=False, extra_settings=None): - es = {} - if debug: - es['DEBUG'] = True - if extra_settings: - es.update(extra_settings) - - from .web import create_foodtruck_app - try: - app = create_foodtruck_app(es) - app.run(host=host, port=port, 
debug=debug, threaded=True) - except SystemExit: - # This is needed for Werkzeug's code reloader to be able to correctly - # shutdown the child process in order to restart it (otherwise, SSE - # generators will keep it alive). - from . import pubutil - logger.debug("Shutting down SSE generators from main...") - pubutil.server_shutdown = True - raise - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/settings.py --- a/piecrust/admin/settings.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/settings.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,2 @@ +FOODTRUCK_ROOT_URL = '' +TOKEN_ENDPOINT = "https://tokens.indieauth.com/token" diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/siteinfo.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/admin/siteinfo.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,145 @@ +import os +import os.path +import sys +import copy +import logging +import threading +import subprocess +from flask import request, flash +from piecrust import CACHE_DIR +from piecrust.app import PieCrustFactory + + +logger = logging.getLogger(__name__) + + +class UnauthorizedSiteAccessError(Exception): + pass + + +class InvalidSiteError(Exception): + pass + + +class SiteInfo: + def __init__(self, root_dir, *, url_prefix='', debug=False): + self.root_dir = root_dir + self.url_prefix = url_prefix + self.debug = debug + self._piecrust_factory = None + self._piecrust_app = None + self._scm = None + + def make_url(self, rel_url): + prefix = self.url_prefix + if not prefix: + return rel_url + return prefix + rel_url + + @property + def piecrust_factory(self): + if self._piecrust_factory is None: + self._piecrust_factory = PieCrustFactory( + self.root_dir, + cache_key='admin', + debug=self.debug, + config_values=[ + ('site/root', self.make_url('/preview/')), + ('site/asset_url_format', self.make_url( + '/preview/_asset/%path%'))] + ) + return self._piecrust_factory + + @property + def piecrust_app(self): + if self._piecrust_app is None: + 
logger.debug("Creating PieCrust admin app: %s" % self.root_dir) + self._piecrust_app = self.piecrust_factory.create() + return self._piecrust_app + + @property + def scm(self): + if self._scm is None: + cfg = copy.deepcopy(self.piecrust_app.config.get('scm', {})) + + if os.path.isdir(os.path.join(self.root_dir, '.hg')): + from .scm.mercurial import MercurialSourceControl + self._scm = MercurialSourceControl(self.root_dir, cfg) + elif os.path.isdir(os.path.join(self.root_dir, '.git')): + from .scm.git import GitSourceControl + self._scm = GitSourceControl(self.root_dir, cfg) + else: + self._scm = False + + return self._scm + + @property + def publish_pid_file(self): + return os.path.join(self.piecrust_app.cache_dir, 'publish.pid') + + @property + def publish_log_file(self): + return os.path.join(self.piecrust_app.cache_dir, 'publish.log') + + def rebakeAssets(self): + out_dir = os.path.join( + self.root_dir, + CACHE_DIR, + self.piecrust_factory.cache_key, + 'server') + args = [ + '--no-color', + 'bake', + '-o', out_dir, + '--assets-only'] + proc = self._runChef(args) + try: + proc.wait(timeout=2) + if proc.returncode == 0: + flash("Assets baked successfully!") + else: + flash("Asset baking process returned '%s'... check the log." % + proc.returncode) + except subprocess.TimeoutExpired: + flash("Asset baking process is still running... check the log later.") + + def getPublishTargetLogFile(self, target): + target = target.replace(' ', '_').lower() + return os.path.join(self.piecrust_app.cache_dir, + 'publish.%s.log' % target) + + def publish(self, target): + args = [ + '--no-color', + '--pid-file', self.publish_pid_file, + '--log', self.publish_log_file, + 'publish', + '--log-publisher', self.getPublishTargetLogFile(target), + '--log-debug-info', + target] + proc = self._runChef(args) + try: + proc.wait(timeout=2) + if proc.returncode == 0: + flash("Publish process ran successfully!") + else: + flash("Publish process returned '%s'... check the log." 
% + proc.returncode) + except subprocess.TimeoutExpired: + flash("Publish process is still running... check the log later.") + + def _runChef(self, args): + chef_path = os.path.realpath(os.path.join( + os.path.dirname(__file__), + '../../chef.py')) + args = [sys.executable, chef_path] + args + + env = {} + for k, v in os.environ.items(): + env[k] = v + env['PYTHONHOME'] = sys.prefix + + logger.info("Running chef command: %s" % args) + proc = subprocess.Popen(args, cwd=self.root_dir, env=env) + logger.info("Chef process ID: %s" % proc.pid) + return proc diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/sites.py --- a/piecrust/admin/sites.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,110 +0,0 @@ -import os -import os.path -import copy -import logging -import threading -import subprocess -from piecrust.app import PieCrust -from piecrust.configuration import merge_dicts - - -logger = logging.getLogger(__name__) - - -class UnauthorizedSiteAccessError(Exception): - pass - - -class InvalidSiteError(Exception): - pass - - -class Site(object): - def __init__(self, name, root_dir, config): - self.name = name - self.root_dir = root_dir - self._global_config = config - self._piecrust_app = None - self._scm = None - logger.debug("Creating site object for %s" % self.name) - - @property - def piecrust_app(self): - if self._piecrust_app is None: - s = PieCrust(self.root_dir) - s.config.set('site/root', '/site/%s/' % self.name) - self._piecrust_app = s - return self._piecrust_app - - @property - def scm(self): - if self._scm is None: - cfg = copy.deepcopy(self._global_config.get('scm', {})) - merge_dicts(cfg, self.piecrust_app.config.get('scm', {})) - - if os.path.isdir(os.path.join(self.root_dir, '.hg')): - from .scm.mercurial import MercurialSourceControl - self._scm = MercurialSourceControl(self.root_dir, cfg) - elif os.path.isdir(os.path.join(self.root_dir, '.git')): - from .scm.git import GitSourceControl - self._scm = 
GitSourceControl(self.root_dir, cfg) - else: - self._scm = False - - return self._scm - - @property - def publish_pid_file(self): - return os.path.join(self.piecrust_app.cache_dir, 'publish.pid') - - @property - def publish_log_file(self): - return os.path.join(self.piecrust_app.cache_dir, 'publish.log') - - def publish(self, target): - args = [ - 'chef', - '--pid-file', self.publish_pid_file, - 'publish', target, - '--log-publisher', self.publish_log_file] - proc = subprocess.Popen(args, cwd=self.root_dir) - - def _comm(): - proc.communicate() - - t = threading.Thread(target=_comm, daemon=True) - t.start() - - -class FoodTruckSites(): - def __init__(self, config, current_site): - self._sites = {} - self.config = config - self.current_site = current_site - if current_site is None: - raise Exception("No current site was given.") - - def get_root_dir(self, name=None): - name = name or self.current_site - root_dir = self.config.get('sites/%s' % name) - if root_dir is None: - raise InvalidSiteError("No such site: %s" % name) - if not os.path.isdir(root_dir): - raise InvalidSiteError("Site '%s' has an invalid path." % name) - return root_dir - - def get(self, name=None): - name = name or self.current_site - s = self._sites.get(name) - if s: - return s - - root_dir = self.get_root_dir(name) - s = Site(name, root_dir, self.config) - self._sites[name] = s - return s - - def getall(self): - for name in self.config.get('sites'): - yield self.get(name) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/static/css/foodtruck.min.css --- a/piecrust/admin/static/css/foodtruck.min.css Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/static/css/foodtruck.min.css Tue Nov 21 22:07:12 2017 -0800 @@ -1,11 +1,11 @@ -/*! 
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{font-size:2em;margin:.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type=checkbox],input[type=radio]{box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-appearance:textfield;box-sizing:content-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid silver;margin:0 2px;padding:.35em .625em .75em}textarea{overflow:auto}optgroup{font-weight:700}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}/*! 
Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{background:transparent!important;color:#000!important;box-shadow:none!important;text-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:Glyphicons Halflings;src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format("embedded-opentype"),url(../fonts/glyphicons-halflings-regular.woff2) format("woff2"),url(../fonts/glyphicons-halflings-regular.woff) format("woff"),url(../fonts/glyphicons-halflings-regular.ttf) format("truetype"),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format("svg")}.glyphicon{position:relative;top:1px;display:inline-block;font-family:Glyphicons 
Halflings;font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.gl
yphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:
before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}
.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{co
ntent:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before,.glyphicon-btc:before,.glyphicon-xbt:before{content:"\e227"}.glyphicon-jpy:before,.glyphicon-yen:before{content:"\00a5"}.glyphicon-rub:before,.glyphic
on-ruble:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*,:after,:before{box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:transparent}body{font-family:Helvetica Neue,Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.img-responsive{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{padding:4px;line-height:1.42857;background-color:#fff;border:1px solid #ddd;border-radius:4px;transition:all .2s ease-in-out;display:inline-block;max-width:100%;height:auto}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media 
(min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{background-color:#fcf8e3;padding:.2em}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.initialism,.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}.list-inline,.list-unstyled{padding-left:0;list-style:none}.list-inline{margin-left:-5px}.list-inline>li{display:inline-block;padding-left:5px;padding-right:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857}dt{font-weight:700}dd{margin-left:0}.dl-horizontal dd:after,.dl-horizontal dd:before{content:" ";display:table}.dl-horizontal dd:after{clear:both}@media (min-width:768px){.dl-horizontal 
dt{float:left;width:160px;clear:left;text-align:right;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:"\2014 \00A0"}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0;text-align:right}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:""}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:"\00A0 \2014"}address{margin-bottom:20px;font-style:normal;line-height:1.42857}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,Courier New,monospace}code{color:#c7254e;background-color:#f9f2f4;border-radius:4px}code,kbd{padding:2px 4px;font-size:90%}kbd{color:#fff;background-color:#333;border-radius:3px;box-shadow:inset 0 -1px 0 rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857;word-break:break-all;word-wrap:break-word;color:#333;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre 
code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}.container:after,.container:before{content:" ";display:table}.container:after{clear:both}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}.container-fluid:after,.container-fluid:before{content:" ";display:table}.container-fluid:after{clear:both}.row{margin-left:-15px;margin-right:-15px}.row:after,.row:before{content:" ";display:table}.row:after{clear:both}.col-lg-1,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-10,.col-lg-11,.col-lg-12,.col-md-1,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-10,.col-md-11,.col-md-12,.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12,.col-xs-1,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.col-xs-10,.col-xs-11,.col-xs-12{position:relative;min-height:1px;padding-left:15px;padding-right:15px}.col-xs-1,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.col-xs-10,.col-xs-11,.col-xs-12{float:left}.col-xs-1{width:8.33333%}.col-xs-2{width:16.66667%}.col-xs-3{width:25%}.col-xs-4{width:33.33333%}.col-xs-5{width:41.66667%}.col-xs-6{width:50%}.col-xs-7{width:58.33333%}.col-xs-8{width:66.66667%}.col-xs-9{width:75%}.col-xs-10{width:83.33333%}.col-xs-11{width:91.66667%}.col-xs-12{width:100%}.col-xs-pull-0{right:auto}.col-xs-pull-1{right:8.33333%}.col-xs-pull-2{right:16.66667%}.col-xs-pull-3{right:25%}.col-xs-pull-4{right:33.33333%}.col-xs-pull-5{right:41.66667%}.col-xs-pull-6{right:50%}.col-xs-pull-7{right
:58.33333%}.col-xs-pull-8{right:66.66667%}.col-xs-pull-9{right:75%}.col-xs-pull-10{right:83.33333%}.col-xs-pull-11{right:91.66667%}.col-xs-pull-12{right:100%}.col-xs-push-0{left:auto}.col-xs-push-1{left:8.33333%}.col-xs-push-2{left:16.66667%}.col-xs-push-3{left:25%}.col-xs-push-4{left:33.33333%}.col-xs-push-5{left:41.66667%}.col-xs-push-6{left:50%}.col-xs-push-7{left:58.33333%}.col-xs-push-8{left:66.66667%}.col-xs-push-9{left:75%}.col-xs-push-10{left:83.33333%}.col-xs-push-11{left:91.66667%}.col-xs-push-12{left:100%}.col-xs-offset-0{margin-left:0}.col-xs-offset-1{margin-left:8.33333%}.col-xs-offset-2{margin-left:16.66667%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-4{margin-left:33.33333%}.col-xs-offset-5{margin-left:41.66667%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-7{margin-left:58.33333%}.col-xs-offset-8{margin-left:66.66667%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-10{margin-left:83.33333%}.col-xs-offset-11{margin-left:91.66667%}.col-xs-offset-12{margin-left:100%}@media 
(min-width:768px){.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{float:left}.col-sm-1{width:8.33333%}.col-sm-2{width:16.66667%}.col-sm-3{width:25%}.col-sm-4{width:33.33333%}.col-sm-5{width:41.66667%}.col-sm-6{width:50%}.col-sm-7{width:58.33333%}.col-sm-8{width:66.66667%}.col-sm-9{width:75%}.col-sm-10{width:83.33333%}.col-sm-11{width:91.66667%}.col-sm-12{width:100%}.col-sm-pull-0{right:auto}.col-sm-pull-1{right:8.33333%}.col-sm-pull-2{right:16.66667%}.col-sm-pull-3{right:25%}.col-sm-pull-4{right:33.33333%}.col-sm-pull-5{right:41.66667%}.col-sm-pull-6{right:50%}.col-sm-pull-7{right:58.33333%}.col-sm-pull-8{right:66.66667%}.col-sm-pull-9{right:75%}.col-sm-pull-10{right:83.33333%}.col-sm-pull-11{right:91.66667%}.col-sm-pull-12{right:100%}.col-sm-push-0{left:auto}.col-sm-push-1{left:8.33333%}.col-sm-push-2{left:16.66667%}.col-sm-push-3{left:25%}.col-sm-push-4{left:33.33333%}.col-sm-push-5{left:41.66667%}.col-sm-push-6{left:50%}.col-sm-push-7{left:58.33333%}.col-sm-push-8{left:66.66667%}.col-sm-push-9{left:75%}.col-sm-push-10{left:83.33333%}.col-sm-push-11{left:91.66667%}.col-sm-push-12{left:100%}.col-sm-offset-0{margin-left:0}.col-sm-offset-1{margin-left:8.33333%}.col-sm-offset-2{margin-left:16.66667%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-4{margin-left:33.33333%}.col-sm-offset-5{margin-left:41.66667%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-7{margin-left:58.33333%}.col-sm-offset-8{margin-left:66.66667%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-10{margin-left:83.33333%}.col-sm-offset-11{margin-left:91.66667%}.col-sm-offset-12{margin-left:100%}}@media 
(min-width:992px){.col-md-1,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-10,.col-md-11,.col-md-12{float:left}.col-md-1{width:8.33333%}.col-md-2{width:16.66667%}.col-md-3{width:25%}.col-md-4{width:33.33333%}.col-md-5{width:41.66667%}.col-md-6{width:50%}.col-md-7{width:58.33333%}.col-md-8{width:66.66667%}.col-md-9{width:75%}.col-md-10{width:83.33333%}.col-md-11{width:91.66667%}.col-md-12{width:100%}.col-md-pull-0{right:auto}.col-md-pull-1{right:8.33333%}.col-md-pull-2{right:16.66667%}.col-md-pull-3{right:25%}.col-md-pull-4{right:33.33333%}.col-md-pull-5{right:41.66667%}.col-md-pull-6{right:50%}.col-md-pull-7{right:58.33333%}.col-md-pull-8{right:66.66667%}.col-md-pull-9{right:75%}.col-md-pull-10{right:83.33333%}.col-md-pull-11{right:91.66667%}.col-md-pull-12{right:100%}.col-md-push-0{left:auto}.col-md-push-1{left:8.33333%}.col-md-push-2{left:16.66667%}.col-md-push-3{left:25%}.col-md-push-4{left:33.33333%}.col-md-push-5{left:41.66667%}.col-md-push-6{left:50%}.col-md-push-7{left:58.33333%}.col-md-push-8{left:66.66667%}.col-md-push-9{left:75%}.col-md-push-10{left:83.33333%}.col-md-push-11{left:91.66667%}.col-md-push-12{left:100%}.col-md-offset-0{margin-left:0}.col-md-offset-1{margin-left:8.33333%}.col-md-offset-2{margin-left:16.66667%}.col-md-offset-3{margin-left:25%}.col-md-offset-4{margin-left:33.33333%}.col-md-offset-5{margin-left:41.66667%}.col-md-offset-6{margin-left:50%}.col-md-offset-7{margin-left:58.33333%}.col-md-offset-8{margin-left:66.66667%}.col-md-offset-9{margin-left:75%}.col-md-offset-10{margin-left:83.33333%}.col-md-offset-11{margin-left:91.66667%}.col-md-offset-12{margin-left:100%}}@media 
(min-width:1200px){.col-lg-1,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-10,.col-lg-11,.col-lg-12{float:left}.col-lg-1{width:8.33333%}.col-lg-2{width:16.66667%}.col-lg-3{width:25%}.col-lg-4{width:33.33333%}.col-lg-5{width:41.66667%}.col-lg-6{width:50%}.col-lg-7{width:58.33333%}.col-lg-8{width:66.66667%}.col-lg-9{width:75%}.col-lg-10{width:83.33333%}.col-lg-11{width:91.66667%}.col-lg-12{width:100%}.col-lg-pull-0{right:auto}.col-lg-pull-1{right:8.33333%}.col-lg-pull-2{right:16.66667%}.col-lg-pull-3{right:25%}.col-lg-pull-4{right:33.33333%}.col-lg-pull-5{right:41.66667%}.col-lg-pull-6{right:50%}.col-lg-pull-7{right:58.33333%}.col-lg-pull-8{right:66.66667%}.col-lg-pull-9{right:75%}.col-lg-pull-10{right:83.33333%}.col-lg-pull-11{right:91.66667%}.col-lg-pull-12{right:100%}.col-lg-push-0{left:auto}.col-lg-push-1{left:8.33333%}.col-lg-push-2{left:16.66667%}.col-lg-push-3{left:25%}.col-lg-push-4{left:33.33333%}.col-lg-push-5{left:41.66667%}.col-lg-push-6{left:50%}.col-lg-push-7{left:58.33333%}.col-lg-push-8{left:66.66667%}.col-lg-push-9{left:75%}.col-lg-push-10{left:83.33333%}.col-lg-push-11{left:91.66667%}.col-lg-push-12{left:100%}.col-lg-offset-0{margin-left:0}.col-lg-offset-1{margin-left:8.33333%}.col-lg-offset-2{margin-left:16.66667%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-4{margin-left:33.33333%}.col-lg-offset-5{margin-left:41.66667%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-7{margin-left:58.33333%}.col-lg-offset-8{margin-left:66.66667%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-10{margin-left:83.33333%}.col-lg-offset-11{margin-left:91.66667%}.col-lg-offset-12{margin-left:100%}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777}caption,th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857;vertical-align:top;bor
der-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered,.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;float:none;display:table-column}table td[class*=col-],table 
th[class*=col-]{position:static;float:none;display:table-cell}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>t
head>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{overflow-x:auto;min-height:.01%}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{margin:0;min-width:0}fieldset,legend{padding:0;border:0}legend{display:block;width:100%;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{padding-top:7px}.form-control,output{display:block;font-size:14px;line-height:1.42857;color:#555}.form-control{width:100%;height:34px;padding:6px 12px;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;box-shadow:inset 0 1px 1px rgba(0,0,0,.075);transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out}.form-control:focus{border-color:#66afe9;outline:0;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{border:0;background-color:transparent}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=datetime-local].form-control,input[type=month].form-control,input[type=time].form-control{line-height:34px}.input-group-sm>.input-group-btn>input[type=date].btn,.input-group-sm>.input-group-btn>input[type=datetime-local].btn,.input-group-sm>.input-group-btn>input[type=month].btn,.input-group-sm>.input-group-btn>input[type=time].btn,.input-group-sm>input[type=date].form-control,.input-group-sm>input[type=date].input-group-addon,.input-group-sm>input[type=datetime-local].form-control,.input-group-sm>input[type=datetime-local].input-group-addon,.input-group-sm>input[type=month].form-control,.input-group-sm>input[type=month].input-group-addon,.input-group-sm>input[type=time].form-control,.input-group-sm>input[type=time].input-group-addon,.input-group-sm input[type=date],.input-group-sm input[type=datetime-local],.input-group-sm input[type=month],.input-group-sm 
input[type=time],input[type=date].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm,input[type=time].input-sm{line-height:30px}.input-group-lg>.input-group-btn>input[type=date].btn,.input-group-lg>.input-group-btn>input[type=datetime-local].btn,.input-group-lg>.input-group-btn>input[type=month].btn,.input-group-lg>.input-group-btn>input[type=time].btn,.input-group-lg>input[type=date].form-control,.input-group-lg>input[type=date].input-group-addon,.input-group-lg>input[type=datetime-local].form-control,.input-group-lg>input[type=datetime-local].input-group-addon,.input-group-lg>input[type=month].form-control,.input-group-lg>input[type=month].input-group-addon,.input-group-lg>input[type=time].form-control,.input-group-lg>input[type=time].input-group-addon,.input-group-lg input[type=date],.input-group-lg input[type=datetime-local],.input-group-lg input[type=month],.input-group-lg input[type=time],input[type=date].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg,input[type=time].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox-inline input[type=checkbox],.checkbox input[type=checkbox],.radio-inline input[type=radio],.radio input[type=radio]{position:absolute;margin-left:-20px;margin-top:4px\9}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;vertical-align:middle;font-weight:400;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}.checkbox-inline.disabled,.checkbox.disabled label,.radio-inline.disabled,.radio.disabled label,fieldset[disabled] .checkbox-inline,fieldset[disabled] .checkbox label,fieldset[disabled] .radio-inline,fieldset[disabled] .radio 
label,fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.form-control-static{padding-top:7px;padding-bottom:7px;margin-bottom:0;min-height:34px}.form-control-static.input-lg,.form-control-static.input-sm,.input-group-lg>.form-control-static.form-control,.input-group-lg>.form-control-static.input-group-addon,.input-group-lg>.input-group-btn>.form-control-static.btn,.input-group-sm>.form-control-static.form-control,.input-group-sm>.form-control-static.input-group-addon,.input-group-sm>.input-group-btn>.form-control-static.btn{padding-left:0;padding-right:0}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn,.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.input-group-sm>.input-group-btn>select.btn,.input-group-sm>select.form-control,.input-group-sm>select.input-group-addon,select.input-sm{height:30px;line-height:30px}.input-group-sm>.input-group-btn>select[multiple].btn,.input-group-sm>.input-group-btn>textarea.btn,.input-group-sm>select[multiple].form-control,.input-group-sm>select[multiple].input-group-addon,.input-group-sm>textarea.form-control,.input-group-sm>textarea.input-group-addon,select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm .form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn,.input-lg{height:46px;padding:10px 
16px;font-size:18px;line-height:1.33333;border-radius:6px}.input-group-lg>.input-group-btn>select.btn,.input-group-lg>select.form-control,.input-group-lg>select.input-group-addon,select.input-lg{height:46px;line-height:46px}.input-group-lg>.input-group-btn>select[multiple].btn,.input-group-lg>.input-group-btn>textarea.btn,.input-group-lg>select[multiple].form-control,.input-group-lg>select[multiple].input-group-addon,.input-group-lg>textarea.form-control,.input-group-lg>textarea.input-group-addon,select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.33333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.33333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:1;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-group-lg>.form-control+.form-control-feedback,.input-group-lg>.input-group-addon+.form-control-feedback,.input-group-lg>.input-group-btn>.btn+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm .form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-group-sm>.form-control+.form-control-feedback,.input-group-sm>.input-group-addon+.form-control-feedback,.input-group-sm>.input-group-btn>.btn+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success.checkbox-inline label,.has-success.checkbox label,.has-success 
.control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.radio-inline label,.has-success.radio label{color:#3c763d}.has-success .form-control{border-color:#3c763d;box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;border-color:#3c763d;background-color:#dff0d8}.has-success .form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning.checkbox-inline label,.has-warning.checkbox label,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.radio-inline label,.has-warning.radio label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;border-color:#8a6d3b;background-color:#fcf8e3}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error.checkbox-inline label,.has-error.checkbox label,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.radio-inline label,.has-error.radio label{color:#a94442}.has-error .form-control{border-color:#a94442;box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;border-color:#a94442;background-color:#f2dede}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline 
.form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline .input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{margin-top:0;margin-bottom:0;padding-top:7px}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-left:-15px;margin-right:-15px}.form-horizontal .form-group:after,.form-horizontal .form-group:before{content:" ";display:table}.form-horizontal .form-group:after{clear:both}@media (min-width:768px){.form-horizontal .control-label{text-align:right;margin-bottom:0;padding-top:7px}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;margin-bottom:0;font-weight:400;text-align:center;vertical-align:middle;touch-action:manipulation;cursor:pointer;background-image:none;border:1px solid transparent;white-space:nowrap;padding:6px 
12px;font-size:14px;line-height:1.42857;border-radius:4px;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{outline:0;background-image:none;box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;opacity:.65;filter:alpha(opacity=65);box-shadow:none}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.btn-default:hover,.open>.btn-default.dropdown-toggle{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.btn-default.dropdown-toggle.focus,.open>.btn-default.dropdown-toggle:focus,.open>.btn-default.dropdown-toggle:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.btn-default.dropdown-toggle{background-image:none}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary.active,.btn-primary:active,.btn-primary:hover,.open>.btn-primary.dropdown-toggle{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.btn-primary.dropdown-toggle.focus,.open>.btn-primary.dropdown-toggle:focus,.open>.btn-primary.dropdown-toggle:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.btn-primary.dropdown-toggle{background-image:none}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success.active,.btn-success:active,.btn-success:hover,.open>.btn-success.dropdown-toggle{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.btn-success.dropdown-toggle.focus,.open>.btn-success.dropdown-toggle:focus,.open>.btn-success.dropdown-toggle:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.btn-success.dropdown-toggle{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info.active,.btn-info:active,.btn-info:hover,.open>.btn-info.dropdown-toggle{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.btn-info.dropdown-toggle.focus,.open>.btn-info.dropdown-toggle:focus,.open>.btn-info.dropdown-toggle:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.btn-info.dropdown-toggle{background-image:none}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.btn-warning:hover,.open>.btn-warning.dropdown-toggle{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.btn-warning.dropdown-toggle.focus,.open>.btn-warning.dropdown-toggle:focus,.open>.btn-warning.dropdown-toggle:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.btn-warning.dropdown-toggle{background-image:none}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger.active,.btn-danger:active,.btn-danger:hover,.open>.btn-danger.dropdown-toggle{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.btn-danger.dropdown-toggle.focus,.open>.btn-danger.dropdown-toggle:focus,.open>.btn-danger.dropdown-toggle:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.btn-danger.dropdown-toggle{background-image:none}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{color:#337ab7;font-weight:400;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.33333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{background-color:#dff0d8;border-color:#d6e9c6;color:#3c763d}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{background-color:#d9edf7;border-color:#bce8f1;color:#31708f}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{background-color:#fcf8e3;border-color:#faebcc;color:#8a6d3b}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{background-color:#f2dede;border-color:#ebccd1;color:#a94442}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:1}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar:after,.btn-toolbar:before{content:" ";display:table}.btn-toolbar:after{clear:both}.btn-toolbar .btn,.btn-toolbar .btn-group,.btn-toolbar 
.input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-bottom-left-radius:0;border-top-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-left:8px;padding-right:8px}.btn-group-lg.btn-group>.btn+.dropdown-toggle,.btn-group>.btn-lg+.dropdown-toggle{padding-left:12px;padding-right:12px}.btn-group.open .dropdown-toggle{box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{box-shadow:none}.btn .caret{margin-left:0}.btn-group-lg>.btn .caret,.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-group-lg>.btn .caret,.dropup .btn-lg .caret{border-width:0 5px 5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before{content:" 
";display:table}.btn-group-vertical>.btn-group:after{clear:both}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-right-radius:4px;border-top-left-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-right-radius:0;border-top-left-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-right-radius:0;border-top-left-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{float:none;display:table-cell;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio],[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 
#fff;opacity:.2;filter:alpha(opacity=20)}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;opacity:.5;filter:alpha(opacity=50)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.fade{opacity:0;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;transition-property:height,visibility;transition-duration:.35s;transition-timing-function:ease}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:4;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;font-size:14px;text-align:left;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;box-shadow:0 6px 12px rgba(0,0,0,.175);background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{text-decoration:none;color:#262626;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;outline:0;background-color:#337ab7}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);cursor:not-allowed}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{left:auto;right:0}.dropdown-menu-left{left:0;right:auto}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;left:0;right:0;bottom:0;top:0;z-index:3}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{left:0;right:auto}}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-left:0;padding-right:0}.input-group .form-control{position:relative;z-index:1;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:2}.input-group-addon,.input-group-btn,.input-group .form-control{display:table-cell}.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child),.input-group 
.form-control:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.input-group-addon.btn{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.input-group-addon.btn{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group .form-control:first-child{border-bottom-right-radius:0;border-top-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle,.input-group 
.form-control:last-child{border-bottom-left-radius:0;border-top-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{font-size:0;white-space:nowrap}.input-group-btn,.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:1}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:1;margin-left:-1px}.modal,.modal-open{overflow:hidden}.modal{display:none;position:fixed;top:0;right:0;bottom:0;left:0;z-index:7;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transform:translateY(-25%);transform:translateY(-25%);transition:transform .3s ease-out}.modal.in .modal-dialog{-webkit-transform:translate(0);transform:translate(0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;box-shadow:0 3px 9px rgba(0,0,0,.5);background-clip:padding-box;outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:6;background-color:#000}.modal-backdrop.fade{opacity:0;filter:alpha(opacity=0)}.modal-backdrop.in{opacity:.5;filter:alpha(opacity=50)}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header:after,.modal-header:before{content:" ";display:table}.modal-header:after{clear:both}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer:after,.modal-footer:before{content:" ";display:table}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-left:5px;margin-bottom:0}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer 
.btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.clearfix:after,.clearfix:before{content:" ";display:table}.clearfix:after{clear:both}.center-block{display:block;margin-left:auto;margin-right:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media (max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and 
(max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media print{.hidden-print{display:none!important}}/*! 
- Ionicons, v2.0.0 - Created by Ben Sperry for the Ionic Framework, http://ionicons.com/ - https://twitter.com/benjsperry https://twitter.com/ionicframework - MIT License: https://github.com/driftyco/ionicons - - Android-style icons originally built by Google’s - Material Design Icons: https://github.com/google/material-design-icons - used under CC BY http://creativecommons.org/licenses/by/4.0/ - Modified icons to fit ionicon’s grid from original. +/*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{font-size:2em;margin:.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html 
input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type=checkbox],input[type=radio]{box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-appearance:textfield;box-sizing:content-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid silver;margin:0 2px;padding:.35em .625em .75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{background:transparent!important;color:#000!important;box-shadow:none!important;text-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:Glyphicons Halflings;src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format("embedded-opentype"),url(../fonts/glyphicons-halflings-regular.woff2) format("woff2"),url(../fonts/glyphicons-halflings-regular.woff) format("woff"),url(../fonts/glyphicons-halflings-regular.ttf) 
format("truetype"),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format("svg")}.glyphicon{position:relative;top:1px;display:inline-block;font-family:Glyphicons Halflings;font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{conten
t:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-re
move-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-
arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-cre
dit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{con
tent:"\e226"}.glyphicon-bitcoin:before,.glyphicon-btc:before,.glyphicon-xbt:before{content:"\e227"}.glyphicon-jpy:before,.glyphicon-yen:before{content:"\00a5"}.glyphicon-rub:before,.glyphicon-ruble:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*,:after,:before{box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:transparent}body{font-family:Helvetica 
Neue,Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.img-responsive{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{padding:4px;line-height:1.42857;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;transition:all .2s ease-in-out;display:inline-block;max-width:100%;height:auto}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 
10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{background-color:#fcf8e3;padding:.2em}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.initialism,.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}.list-inline,.list-unstyled{padding-left:0;list-style:none}.list-inline{margin-left:-5px}.list-inline>li{display:inline-block;padding-left:5px;padding-right:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857}dt{font-weight:700}dd{margin-left:0}.dl-horizontal dd:after,.dl-horizontal dd:before{content:" ";display:table}.dl-horizontal dd:after{clear:both}@media (min-width:768px){.dl-horizontal 
dt{float:left;width:160px;clear:left;text-align:right;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0;text-align:right}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,Courier New,monospace}code{color:#c7254e;background-color:#f9f2f4;border-radius:4px}code,kbd{padding:2px 4px;font-size:90%}kbd{color:#fff;background-color:#333;border-radius:3px;box-shadow:inset 0 -1px 0 rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857;word-break:break-all;word-wrap:break-word;color:#333;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre 
code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}.container:after,.container:before{content:" ";display:table}.container:after{clear:both}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}.container-fluid:after,.container-fluid:before{content:" ";display:table}.container-fluid:after{clear:both}.row{margin-left:-15px;margin-right:-15px}.row:after,.row:before{content:" ";display:table}.row:after{clear:both}.col-lg-1,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-10,.col-lg-11,.col-lg-12,.col-md-1,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-10,.col-md-11,.col-md-12,.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12,.col-xs-1,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.col-xs-10,.col-xs-11,.col-xs-12{position:relative;min-height:1px;padding-left:15px;padding-right:15px}.col-xs-1,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.col-xs-10,.col-xs-11,.col-xs-12{float:left}.col-xs-1{width:8.33333%}.col-xs-2{width:16.66667%}.col-xs-3{width:25%}.col-xs-4{width:33.33333%}.col-xs-5{width:41.66667%}.col-xs-6{width:50%}.col-xs-7{width:58.33333%}.col-xs-8{width:66.66667%}.col-xs-9{width:75%}.col-xs-10{width:83.33333%}.col-xs-11{width:91.66667%}.col-xs-12{width:100%}.col-xs-pull-0{right:auto}.col-xs-pull-1{right:8.33333%}.col-xs-pull-2{right:16.66667%}.col-xs-pull-3{right:25%}.col-xs-pull-4{right:33.33333%}.col-xs-pull-5{right:41.66667%}.col-xs-pull-6{right:50%}.col-xs-pull-7{right
:58.33333%}.col-xs-pull-8{right:66.66667%}.col-xs-pull-9{right:75%}.col-xs-pull-10{right:83.33333%}.col-xs-pull-11{right:91.66667%}.col-xs-pull-12{right:100%}.col-xs-push-0{left:auto}.col-xs-push-1{left:8.33333%}.col-xs-push-2{left:16.66667%}.col-xs-push-3{left:25%}.col-xs-push-4{left:33.33333%}.col-xs-push-5{left:41.66667%}.col-xs-push-6{left:50%}.col-xs-push-7{left:58.33333%}.col-xs-push-8{left:66.66667%}.col-xs-push-9{left:75%}.col-xs-push-10{left:83.33333%}.col-xs-push-11{left:91.66667%}.col-xs-push-12{left:100%}.col-xs-offset-0{margin-left:0}.col-xs-offset-1{margin-left:8.33333%}.col-xs-offset-2{margin-left:16.66667%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-4{margin-left:33.33333%}.col-xs-offset-5{margin-left:41.66667%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-7{margin-left:58.33333%}.col-xs-offset-8{margin-left:66.66667%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-10{margin-left:83.33333%}.col-xs-offset-11{margin-left:91.66667%}.col-xs-offset-12{margin-left:100%}@media 
(min-width:768px){.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{float:left}.col-sm-1{width:8.33333%}.col-sm-2{width:16.66667%}.col-sm-3{width:25%}.col-sm-4{width:33.33333%}.col-sm-5{width:41.66667%}.col-sm-6{width:50%}.col-sm-7{width:58.33333%}.col-sm-8{width:66.66667%}.col-sm-9{width:75%}.col-sm-10{width:83.33333%}.col-sm-11{width:91.66667%}.col-sm-12{width:100%}.col-sm-pull-0{right:auto}.col-sm-pull-1{right:8.33333%}.col-sm-pull-2{right:16.66667%}.col-sm-pull-3{right:25%}.col-sm-pull-4{right:33.33333%}.col-sm-pull-5{right:41.66667%}.col-sm-pull-6{right:50%}.col-sm-pull-7{right:58.33333%}.col-sm-pull-8{right:66.66667%}.col-sm-pull-9{right:75%}.col-sm-pull-10{right:83.33333%}.col-sm-pull-11{right:91.66667%}.col-sm-pull-12{right:100%}.col-sm-push-0{left:auto}.col-sm-push-1{left:8.33333%}.col-sm-push-2{left:16.66667%}.col-sm-push-3{left:25%}.col-sm-push-4{left:33.33333%}.col-sm-push-5{left:41.66667%}.col-sm-push-6{left:50%}.col-sm-push-7{left:58.33333%}.col-sm-push-8{left:66.66667%}.col-sm-push-9{left:75%}.col-sm-push-10{left:83.33333%}.col-sm-push-11{left:91.66667%}.col-sm-push-12{left:100%}.col-sm-offset-0{margin-left:0}.col-sm-offset-1{margin-left:8.33333%}.col-sm-offset-2{margin-left:16.66667%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-4{margin-left:33.33333%}.col-sm-offset-5{margin-left:41.66667%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-7{margin-left:58.33333%}.col-sm-offset-8{margin-left:66.66667%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-10{margin-left:83.33333%}.col-sm-offset-11{margin-left:91.66667%}.col-sm-offset-12{margin-left:100%}}@media 
(min-width:992px){.col-md-1,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-10,.col-md-11,.col-md-12{float:left}.col-md-1{width:8.33333%}.col-md-2{width:16.66667%}.col-md-3{width:25%}.col-md-4{width:33.33333%}.col-md-5{width:41.66667%}.col-md-6{width:50%}.col-md-7{width:58.33333%}.col-md-8{width:66.66667%}.col-md-9{width:75%}.col-md-10{width:83.33333%}.col-md-11{width:91.66667%}.col-md-12{width:100%}.col-md-pull-0{right:auto}.col-md-pull-1{right:8.33333%}.col-md-pull-2{right:16.66667%}.col-md-pull-3{right:25%}.col-md-pull-4{right:33.33333%}.col-md-pull-5{right:41.66667%}.col-md-pull-6{right:50%}.col-md-pull-7{right:58.33333%}.col-md-pull-8{right:66.66667%}.col-md-pull-9{right:75%}.col-md-pull-10{right:83.33333%}.col-md-pull-11{right:91.66667%}.col-md-pull-12{right:100%}.col-md-push-0{left:auto}.col-md-push-1{left:8.33333%}.col-md-push-2{left:16.66667%}.col-md-push-3{left:25%}.col-md-push-4{left:33.33333%}.col-md-push-5{left:41.66667%}.col-md-push-6{left:50%}.col-md-push-7{left:58.33333%}.col-md-push-8{left:66.66667%}.col-md-push-9{left:75%}.col-md-push-10{left:83.33333%}.col-md-push-11{left:91.66667%}.col-md-push-12{left:100%}.col-md-offset-0{margin-left:0}.col-md-offset-1{margin-left:8.33333%}.col-md-offset-2{margin-left:16.66667%}.col-md-offset-3{margin-left:25%}.col-md-offset-4{margin-left:33.33333%}.col-md-offset-5{margin-left:41.66667%}.col-md-offset-6{margin-left:50%}.col-md-offset-7{margin-left:58.33333%}.col-md-offset-8{margin-left:66.66667%}.col-md-offset-9{margin-left:75%}.col-md-offset-10{margin-left:83.33333%}.col-md-offset-11{margin-left:91.66667%}.col-md-offset-12{margin-left:100%}}@media 
(min-width:1200px){.col-lg-1,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-10,.col-lg-11,.col-lg-12{float:left}.col-lg-1{width:8.33333%}.col-lg-2{width:16.66667%}.col-lg-3{width:25%}.col-lg-4{width:33.33333%}.col-lg-5{width:41.66667%}.col-lg-6{width:50%}.col-lg-7{width:58.33333%}.col-lg-8{width:66.66667%}.col-lg-9{width:75%}.col-lg-10{width:83.33333%}.col-lg-11{width:91.66667%}.col-lg-12{width:100%}.col-lg-pull-0{right:auto}.col-lg-pull-1{right:8.33333%}.col-lg-pull-2{right:16.66667%}.col-lg-pull-3{right:25%}.col-lg-pull-4{right:33.33333%}.col-lg-pull-5{right:41.66667%}.col-lg-pull-6{right:50%}.col-lg-pull-7{right:58.33333%}.col-lg-pull-8{right:66.66667%}.col-lg-pull-9{right:75%}.col-lg-pull-10{right:83.33333%}.col-lg-pull-11{right:91.66667%}.col-lg-pull-12{right:100%}.col-lg-push-0{left:auto}.col-lg-push-1{left:8.33333%}.col-lg-push-2{left:16.66667%}.col-lg-push-3{left:25%}.col-lg-push-4{left:33.33333%}.col-lg-push-5{left:41.66667%}.col-lg-push-6{left:50%}.col-lg-push-7{left:58.33333%}.col-lg-push-8{left:66.66667%}.col-lg-push-9{left:75%}.col-lg-push-10{left:83.33333%}.col-lg-push-11{left:91.66667%}.col-lg-push-12{left:100%}.col-lg-offset-0{margin-left:0}.col-lg-offset-1{margin-left:8.33333%}.col-lg-offset-2{margin-left:16.66667%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-4{margin-left:33.33333%}.col-lg-offset-5{margin-left:41.66667%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-7{margin-left:58.33333%}.col-lg-offset-8{margin-left:66.66667%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-10{margin-left:83.33333%}.col-lg-offset-11{margin-left:91.66667%}.col-lg-offset-12{margin-left:100%}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777}caption,th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857;vertical-align:top;bor
der-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered,.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;float:none;display:table-column}table td[class*=col-],table 
th[class*=col-]{position:static;float:none;display:table-cell}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>t
head>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{overflow-x:auto;min-height:.01%}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{margin:0;min-width:0}fieldset,legend{padding:0;border:0}legend{display:block;width:100%;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:5px auto 
-webkit-focus-ring-color;outline-offset:-2px}output{padding-top:7px}.form-control,output{display:block;font-size:14px;line-height:1.42857;color:#555}.form-control{width:100%;height:34px;padding:6px 12px;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{border:0;background-color:transparent}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=datetime-local].form-control,input[type=month].form-control,input[type=time].form-control{line-height:34px}.input-group-sm>.input-group-btn>input[type=date].btn,.input-group-sm>.input-group-btn>input[type=datetime-local].btn,.input-group-sm>.input-group-btn>input[type=month].btn,.input-group-sm>.input-group-btn>input[type=time].btn,.input-group-sm>input[type=date].form-control,.input-group-sm>input[type=date].input-group-addon,.input-group-sm>input[type=datetime-local].form-control,.input-group-sm>input[type=datetime-local].input-group-addon,.input-group-sm>input[type=month].form-control,.input-group-sm>input[type=month].input-group-addon,.input-group-sm>input[type=time].form-control,.input-group-sm>input[type=time].input-group-addon,.input-group-sm input[type=date],.input-group-sm 
input[type=datetime-local],.input-group-sm input[type=month],.input-group-sm input[type=time],input[type=date].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm,input[type=time].input-sm{line-height:30px}.input-group-lg>.input-group-btn>input[type=date].btn,.input-group-lg>.input-group-btn>input[type=datetime-local].btn,.input-group-lg>.input-group-btn>input[type=month].btn,.input-group-lg>.input-group-btn>input[type=time].btn,.input-group-lg>input[type=date].form-control,.input-group-lg>input[type=date].input-group-addon,.input-group-lg>input[type=datetime-local].form-control,.input-group-lg>input[type=datetime-local].input-group-addon,.input-group-lg>input[type=month].form-control,.input-group-lg>input[type=month].input-group-addon,.input-group-lg>input[type=time].form-control,.input-group-lg>input[type=time].input-group-addon,.input-group-lg input[type=date],.input-group-lg input[type=datetime-local],.input-group-lg input[type=month],.input-group-lg input[type=time],input[type=date].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg,input[type=time].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox-inline input[type=checkbox],.checkbox input[type=checkbox],.radio-inline input[type=radio],.radio input[type=radio]{position:absolute;margin-left:-20px;margin-top:4px\9}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;vertical-align:middle;font-weight:400;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}.checkbox-inline.disabled,.checkbox.disabled label,.radio-inline.disabled,.radio.disabled label,fieldset[disabled] .checkbox-inline,fieldset[disabled] 
.checkbox label,fieldset[disabled] .radio-inline,fieldset[disabled] .radio label,fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.form-control-static{padding-top:7px;padding-bottom:7px;margin-bottom:0;min-height:34px}.form-control-static.input-lg,.form-control-static.input-sm,.input-group-lg>.form-control-static.form-control,.input-group-lg>.form-control-static.input-group-addon,.input-group-lg>.input-group-btn>.form-control-static.btn,.input-group-sm>.form-control-static.form-control,.input-group-sm>.form-control-static.input-group-addon,.input-group-sm>.input-group-btn>.form-control-static.btn{padding-left:0;padding-right:0}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn,.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.input-group-sm>.input-group-btn>select.btn,.input-group-sm>select.form-control,.input-group-sm>select.input-group-addon,select.input-sm{height:30px;line-height:30px}.input-group-sm>.input-group-btn>select[multiple].btn,.input-group-sm>.input-group-btn>textarea.btn,.input-group-sm>select[multiple].form-control,.input-group-sm>select[multiple].input-group-addon,.input-group-sm>textarea.form-control,.input-group-sm>textarea.input-group-addon,select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm .form-control-static{height:30px;min-height:32px;padding:6px 
10px;font-size:12px;line-height:1.5}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn,.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.33333;border-radius:6px}.input-group-lg>.input-group-btn>select.btn,.input-group-lg>select.form-control,.input-group-lg>select.input-group-addon,select.input-lg{height:46px;line-height:46px}.input-group-lg>.input-group-btn>select[multiple].btn,.input-group-lg>.input-group-btn>textarea.btn,.input-group-lg>select[multiple].form-control,.input-group-lg>select[multiple].input-group-addon,.input-group-lg>textarea.form-control,.input-group-lg>textarea.input-group-addon,select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.33333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.33333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:1;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-group-lg>.form-control+.form-control-feedback,.input-group-lg>.input-group-addon+.form-control-feedback,.input-group-lg>.input-group-btn>.btn+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm 
.form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-group-sm>.form-control+.form-control-feedback,.input-group-sm>.input-group-addon+.form-control-feedback,.input-group-sm>.input-group-btn>.btn+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success.checkbox-inline label,.has-success.checkbox label,.has-success .control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.radio-inline label,.has-success.radio label{color:#3c763d}.has-success .form-control{border-color:#3c763d;box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;border-color:#3c763d;background-color:#dff0d8}.has-success .form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning.checkbox-inline label,.has-warning.checkbox label,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.radio-inline label,.has-warning.radio label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;border-color:#8a6d3b;background-color:#fcf8e3}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error.checkbox-inline label,.has-error.checkbox label,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.radio-inline label,.has-error.radio label{color:#a94442}.has-error .form-control{border-color:#a94442;box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error 
.form-control:focus{border-color:#843534;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;border-color:#a94442;background-color:#f2dede}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline .input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal .checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{margin-top:0;margin-bottom:0;padding-top:7px}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-left:-15px;margin-right:-15px}.form-horizontal .form-group:after,.form-horizontal .form-group:before{content:" ";display:table}.form-horizontal .form-group:after{clear:both}@media (min-width:768px){.form-horizontal .control-label{text-align:right;margin-bottom:0;padding-top:7px}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal 
.form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;margin-bottom:0;font-weight:400;text-align:center;vertical-align:middle;touch-action:manipulation;cursor:pointer;background-image:none;border:1px solid transparent;white-space:nowrap;padding:6px 12px;font-size:14px;line-height:1.42857;border-radius:4px;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{outline:0;background-image:none;box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;opacity:.65;filter:alpha(opacity=65);box-shadow:none}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.btn-default:hover,.open>.btn-default.dropdown-toggle{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.btn-default.dropdown-toggle.focus,.open>.btn-default.dropdown-toggle:focus,.open>.btn-default.dropdown-toggle:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.btn-default.dropdown-toggle{background-image:none}.btn-default.disabled.focus,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled].focus,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] 
.btn-default.focus,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default .badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary.active,.btn-primary:active,.btn-primary:hover,.open>.btn-primary.dropdown-toggle{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.btn-primary.dropdown-toggle.focus,.open>.btn-primary.dropdown-toggle:focus,.open>.btn-primary.dropdown-toggle:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.btn-primary.dropdown-toggle{background-image:none}.btn-primary.disabled.focus,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled].focus,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary 
.badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success.active,.btn-success:active,.btn-success:hover,.open>.btn-success.dropdown-toggle{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.btn-success.dropdown-toggle.focus,.open>.btn-success.dropdown-toggle:focus,.open>.btn-success.dropdown-toggle:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.btn-success.dropdown-toggle{background-image:none}.btn-success.disabled.focus,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled].focus,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info.active,.btn-info:active,.btn-info:hover,.open>.btn-info.dropdown-toggle{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.btn-info.dropdown-toggle.focus,.open>.btn-info.dropdown-toggle:focus,.open>.btn-info.dropdown-toggle:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.btn-info.dropdown-toggle{background-image:none}.btn-info.disabled.focus,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled].focus,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info 
.badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.btn-warning:hover,.open>.btn-warning.dropdown-toggle{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.btn-warning.dropdown-toggle.focus,.open>.btn-warning.dropdown-toggle:focus,.open>.btn-warning.dropdown-toggle:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.btn-warning.dropdown-toggle{background-image:none}.btn-warning.disabled.focus,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled].focus,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger.active,.btn-danger:active,.btn-danger:hover,.open>.btn-danger.dropdown-toggle{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.btn-danger.dropdown-toggle.focus,.open>.btn-danger.dropdown-toggle:focus,.open>.btn-danger.dropdown-toggle:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.btn-danger.dropdown-toggle{background-image:none}.btn-danger.disabled.focus,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled].focus,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{color:#337ab7;font-weight:400;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.33333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 
5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{background-color:#dff0d8;border-color:#d6e9c6;color:#3c763d}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{background-color:#d9edf7;border-color:#bce8f1;color:#31708f}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{background-color:#fcf8e3;border-color:#faebcc;color:#8a6d3b}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{background-color:#f2dede;border-color:#ebccd1;color:#a94442}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:1}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar:after,.btn-toolbar:before{content:" ";display:table}.btn-toolbar:after{clear:both}.btn-toolbar .btn,.btn-toolbar .btn-group,.btn-toolbar 
.input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-bottom-left-radius:0;border-top-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-left:8px;padding-right:8px}.btn-group-lg.btn-group>.btn+.dropdown-toggle,.btn-group>.btn-lg+.dropdown-toggle{padding-left:12px;padding-right:12px}.btn-group.open .dropdown-toggle{box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{box-shadow:none}.btn .caret{margin-left:0}.btn-group-lg>.btn .caret,.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-group-lg>.btn .caret,.dropup .btn-lg .caret{border-width:0 5px 5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before{content:" 
";display:table}.btn-group-vertical>.btn-group:after{clear:both}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-right-radius:4px;border-top-left-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-right-radius:0;border-top-left-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-right-radius:0;border-top-left-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{float:none;display:table-cell;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio],[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 
#fff;opacity:.2;filter:alpha(opacity=20)}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;opacity:.5;filter:alpha(opacity=50)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.fade{opacity:0;-webkit-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-property:height,visibility;transition-property:height,visibility;-webkit-transition-duration:.35s;transition-duration:.35s;-webkit-transition-timing-function:ease;transition-timing-function:ease}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:4;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;font-size:14px;text-align:left;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;box-shadow:0 6px 12px rgba(0,0,0,.175);background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{text-decoration:none;color:#262626;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;outline:0;background-color:#337ab7}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);cursor:not-allowed}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{left:auto;right:0}.dropdown-menu-left{left:0;right:auto}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;left:0;right:0;bottom:0;top:0;z-index:3}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{left:0;right:auto}}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-left:0;padding-right:0}.input-group .form-control{position:relative;z-index:1;float:left;width:100%;margin-bottom:0}.input-group .form-control:focus{z-index:2}.input-group-addon,.input-group-btn,.input-group .form-control{display:table-cell}.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child),.input-group 
.form-control:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.input-group-addon.btn{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.input-group-addon.btn{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group .form-control:first-child{border-bottom-right-radius:0;border-top-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle,.input-group 
.form-control:last-child{border-bottom-left-radius:0;border-top-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{font-size:0;white-space:nowrap}.input-group-btn,.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:1}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:1;margin-left:-1px}.modal,.modal-open{overflow:hidden}.modal{display:none;position:fixed;top:0;right:0;bottom:0;left:0;z-index:7;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transform:translateY(-25%);transform:translateY(-25%);-webkit-transition:-webkit-transform .3s ease-out;transition:transform .3s ease-out}.modal.in .modal-dialog{-webkit-transform:translate(0);transform:translate(0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;box-shadow:0 3px 9px rgba(0,0,0,.5);background-clip:padding-box;outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:6;background-color:#000}.modal-backdrop.fade{opacity:0;filter:alpha(opacity=0)}.modal-backdrop.in{opacity:.5;filter:alpha(opacity=50)}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header:after,.modal-header:before{content:" ";display:table}.modal-header:after{clear:both}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer:after,.modal-footer:before{content:" ";display:table}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-left:5px;margin-bottom:0}.modal-footer 
.btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.clearfix:after,.clearfix:before{content:" ";display:table}.clearfix:after{clear:both}.center-block{display:block;margin-left:auto;margin-right:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media (max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and 
(max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media 
print{.hidden-print{display:none!important}}/*! + Ionicons, v2.0.0 + Created by Ben Sperry for the Ionic Framework, http://ionicons.com/ + https://twitter.com/benjsperry https://twitter.com/ionicframework + MIT License: https://github.com/driftyco/ionicons + + Android-style icons originally built by Google’s + Material Design Icons: https://github.com/google/material-design-icons + used under CC BY http://creativecommons.org/licenses/by/4.0/ + Modified icons to fit ionicon’s grid from original. */@font-face{font-family:Ionicons;src:url(../fonts/ionicons.eot?v=2.0.0);src:url(../fonts/ionicons.eot?v=2.0.0#iefix) format("embedded-opentype"),url(../fonts/ionicons.ttf?v=2.0.0) format("truetype"),url(../fonts/ionicons.woff?v=2.0.0) format("woff"),url(../fonts/ionicons.svg?v=2.0.0#Ionicons) format("svg");font-weight:400;font-style:normal}.ion,.ion-alert-circled:before,.ion-alert:before,.ion-android-add-circle:before,.ion-android-add:before,.ion-android-alarm-clock:before,.ion-android-alert:before,.ion-android-apps:before,.ion-android-archive:before,.ion-android-arrow-back:before,.ion-android-arrow-down:before,.ion-android-arrow-dropdown-circle:before,.ion-android-arrow-dropdown:before,.ion-android-arrow-dropleft-circle:before,.ion-android-arrow-dropleft:before,.ion-android-arrow-dropright-circle:before,.ion-android-arrow-dropright:before,.ion-android-arrow-dropup-circle:before,.ion-android-arrow-dropup:before,.ion-android-arrow-forward:before,.ion-android-arrow-up:before,.ion-android-attach:before,.ion-android-bar:before,.ion-android-bicycle:before,.ion-android-boat:before,.ion-android-bookmark:before,.ion-android-bulb:before,.ion-android-bus:before,.ion-android-calendar:before,.ion-android-call:before,.ion-android-camera:before,.ion-android-cancel:before,.ion-android-car:before,.ion-android-cart:before,.ion-android-chat:before,.ion-android-checkbox-blank:before,.ion-android-checkbox-outline-blank:before,.ion-android-checkbox-outline:before,.ion-android-checkbox:before,.io
n-android-checkmark-circle:before,.ion-android-clipboard:before,.ion-android-close:before,.ion-android-cloud-circle:before,.ion-android-cloud-done:before,.ion-android-cloud-outline:before,.ion-android-cloud:before,.ion-android-color-palette:before,.ion-android-compass:before,.ion-android-contact:before,.ion-android-contacts:before,.ion-android-contract:before,.ion-android-create:before,.ion-android-delete:before,.ion-android-desktop:before,.ion-android-document:before,.ion-android-done-all:before,.ion-android-done:before,.ion-android-download:before,.ion-android-drafts:before,.ion-android-exit:before,.ion-android-expand:before,.ion-android-favorite-outline:before,.ion-android-favorite:before,.ion-android-film:before,.ion-android-folder-open:before,.ion-android-folder:before,.ion-android-funnel:before,.ion-android-globe:before,.ion-android-hand:before,.ion-android-hangout:before,.ion-android-happy:before,.ion-android-home:before,.ion-android-image:before,.ion-android-laptop:before,.ion-android-list:before,.ion-android-locate:before,.ion-android-lock:before,.ion-android-mail:before,.ion-android-map:before,.ion-android-menu:before,.ion-android-microphone-off:before,.ion-android-microphone:before,.ion-android-more-horizontal:before,.ion-android-more-vertical:before,.ion-android-navigate:before,.ion-android-notifications-none:before,.ion-android-notifications-off:before,.ion-android-notifications:before,.ion-android-open:before,.ion-android-options:before,.ion-android-people:before,.ion-android-person-add:before,.ion-android-person:before,.ion-android-phone-landscape:before,.ion-android-phone-portrait:before,.ion-android-pin:before,.ion-android-plane:before,.ion-android-playstore:before,.ion-android-print:before,.ion-android-radio-button-off:before,.ion-android-radio-button-on:before,.ion-android-refresh:before,.ion-android-remove-circle:before,.ion-android-remove:before,.ion-android-restaurant:before,.ion-android-sad:before,.ion-android-search:before,.ion-android-send:b
efore,.ion-android-settings:before,.ion-android-share-alt:before,.ion-android-share:before,.ion-android-star-half:before,.ion-android-star-outline:before,.ion-android-star:before,.ion-android-stopwatch:before,.ion-android-subway:before,.ion-android-sunny:before,.ion-android-sync:before,.ion-android-textsms:before,.ion-android-time:before,.ion-android-train:before,.ion-android-unlock:before,.ion-android-upload:before,.ion-android-volume-down:before,.ion-android-volume-mute:before,.ion-android-volume-off:before,.ion-android-volume-up:before,.ion-android-walk:before,.ion-android-warning:before,.ion-android-watch:before,.ion-android-wifi:before,.ion-aperture:before,.ion-archive:before,.ion-arrow-down-a:before,.ion-arrow-down-b:before,.ion-arrow-down-c:before,.ion-arrow-expand:before,.ion-arrow-graph-down-left:before,.ion-arrow-graph-down-right:before,.ion-arrow-graph-up-left:before,.ion-arrow-graph-up-right:before,.ion-arrow-left-a:before,.ion-arrow-left-b:before,.ion-arrow-left-c:before,.ion-arrow-move:before,.ion-arrow-resize:before,.ion-arrow-return-left:before,.ion-arrow-return-right:before,.ion-arrow-right-a:before,.ion-arrow-right-b:before,.ion-arrow-right-c:before,.ion-arrow-shrink:before,.ion-arrow-swap:before,.ion-arrow-up-a:before,.ion-arrow-up-b:before,.ion-arrow-up-c:before,.ion-asterisk:before,.ion-at:before,.ion-backspace-outline:before,.ion-backspace:before,.ion-bag:before,.ion-battery-charging:before,.ion-battery-empty:before,.ion-battery-full:before,.ion-battery-half:before,.ion-battery-low:before,.ion-beaker:before,.ion-beer:before,.ion-bluetooth:before,.ion-bonfire:before,.ion-bookmark:before,.ion-bowtie:before,.ion-briefcase:before,.ion-bug:before,.ion-calculator:before,.ion-calendar:before,.ion-camera:before,.ion-card:before,.ion-cash:before,.ion-chatbox-working:before,.ion-chatbox:before,.ion-chatboxes:before,.ion-chatbubble-working:before,.ion-chatbubble:before,.ion-chatbubbles:before,.ion-checkmark-circled:before,.ion-checkmark-round:before,.ion-
checkmark:before,.ion-chevron-down:before,.ion-chevron-left:before,.ion-chevron-right:before,.ion-chevron-up:before,.ion-clipboard:before,.ion-clock:before,.ion-close-circled:before,.ion-close-round:before,.ion-close:before,.ion-closed-captioning:before,.ion-cloud:before,.ion-code-download:before,.ion-code-working:before,.ion-code:before,.ion-coffee:before,.ion-compass:before,.ion-compose:before,.ion-connection-bars:before,.ion-contrast:before,.ion-crop:before,.ion-cube:before,.ion-disc:before,.ion-document-text:before,.ion-document:before,.ion-drag:before,.ion-earth:before,.ion-easel:before,.ion-edit:before,.ion-egg:before,.ion-eject:before,.ion-email-unread:before,.ion-email:before,.ion-erlenmeyer-flask-bubbles:before,.ion-erlenmeyer-flask:before,.ion-eye-disabled:before,.ion-eye:before,.ion-female:before,.ion-filing:before,.ion-film-marker:before,.ion-fireball:before,.ion-flag:before,.ion-flame:before,.ion-flash-off:before,.ion-flash:before,.ion-folder:before,.ion-fork-repo:before,.ion-fork:before,.ion-forward:before,.ion-funnel:before,.ion-gear-a:before,.ion-gear-b:before,.ion-grid:before,.ion-hammer:before,.ion-happy-outline:before,.ion-happy:before,.ion-headphone:before,.ion-heart-broken:before,.ion-heart:before,.ion-help-buoy:before,.ion-help-circled:before,.ion-help:before,.ion-home:before,.ion-icecream:before,.ion-image:before,.ion-images:before,.ion-information-circled:before,.ion-information:before,.ion-ionic:before,.ion-ios-alarm-outline:before,.ion-ios-alarm:before,.ion-ios-albums-outline:before,.ion-ios-albums:before,.ion-ios-americanfootball-outline:before,.ion-ios-americanfootball:before,.ion-ios-analytics-outline:before,.ion-ios-analytics:before,.ion-ios-arrow-back:before,.ion-ios-arrow-down:before,.ion-ios-arrow-forward:before,.ion-ios-arrow-left:before,.ion-ios-arrow-right:before,.ion-ios-arrow-thin-down:before,.ion-ios-arrow-thin-left:before,.ion-ios-arrow-thin-right:before,.ion-ios-arrow-thin-up:before,.ion-ios-arrow-up:before,.ion-ios-at-outlin
e:before,.ion-ios-at:before,.ion-ios-barcode-outline:before,.ion-ios-barcode:before,.ion-ios-baseball-outline:before,.ion-ios-baseball:before,.ion-ios-basketball-outline:before,.ion-ios-basketball:before,.ion-ios-bell-outline:before,.ion-ios-bell:before,.ion-ios-body-outline:before,.ion-ios-body:before,.ion-ios-bolt-outline:before,.ion-ios-bolt:before,.ion-ios-book-outline:before,.ion-ios-book:before,.ion-ios-bookmarks-outline:before,.ion-ios-bookmarks:before,.ion-ios-box-outline:before,.ion-ios-box:before,.ion-ios-briefcase-outline:before,.ion-ios-briefcase:before,.ion-ios-browsers-outline:before,.ion-ios-browsers:before,.ion-ios-calculator-outline:before,.ion-ios-calculator:before,.ion-ios-calendar-outline:before,.ion-ios-calendar:before,.ion-ios-camera-outline:before,.ion-ios-camera:before,.ion-ios-cart-outline:before,.ion-ios-cart:before,.ion-ios-chatboxes-outline:before,.ion-ios-chatboxes:before,.ion-ios-chatbubble-outline:before,.ion-ios-chatbubble:before,.ion-ios-checkmark-empty:before,.ion-ios-checkmark-outline:before,.ion-ios-checkmark:before,.ion-ios-circle-filled:before,.ion-ios-circle-outline:before,.ion-ios-clock-outline:before,.ion-ios-clock:before,.ion-ios-close-empty:before,.ion-ios-close-outline:before,.ion-ios-close:before,.ion-ios-cloud-download-outline:before,.ion-ios-cloud-download:before,.ion-ios-cloud-outline:before,.ion-ios-cloud-upload-outline:before,.ion-ios-cloud-upload:before,.ion-ios-cloud:before,.ion-ios-cloudy-night-outline:before,.ion-ios-cloudy-night:before,.ion-ios-cloudy-outline:before,.ion-ios-cloudy:before,.ion-ios-cog-outline:before,.ion-ios-cog:before,.ion-ios-color-filter-outline:before,.ion-ios-color-filter:before,.ion-ios-color-wand-outline:before,.ion-ios-color-wand:before,.ion-ios-compose-outline:before,.ion-ios-compose:before,.ion-ios-contact-outline:before,.ion-ios-contact:before,.ion-ios-copy-outline:before,.ion-ios-copy:before,.ion-ios-crop-strong:before,.ion-ios-crop:before,.ion-ios-download-outline:before,.ion-ios-do
wnload:before,.ion-ios-drag:before,.ion-ios-email-outline:before,.ion-ios-email:before,.ion-ios-eye-outline:before,.ion-ios-eye:before,.ion-ios-fastforward-outline:before,.ion-ios-fastforward:before,.ion-ios-filing-outline:before,.ion-ios-filing:before,.ion-ios-film-outline:before,.ion-ios-film:before,.ion-ios-flag-outline:before,.ion-ios-flag:before,.ion-ios-flame-outline:before,.ion-ios-flame:before,.ion-ios-flask-outline:before,.ion-ios-flask:before,.ion-ios-flower-outline:before,.ion-ios-flower:before,.ion-ios-folder-outline:before,.ion-ios-folder:before,.ion-ios-football-outline:before,.ion-ios-football:before,.ion-ios-game-controller-a-outline:before,.ion-ios-game-controller-a:before,.ion-ios-game-controller-b-outline:before,.ion-ios-game-controller-b:before,.ion-ios-gear-outline:before,.ion-ios-gear:before,.ion-ios-glasses-outline:before,.ion-ios-glasses:before,.ion-ios-grid-view-outline:before,.ion-ios-grid-view:before,.ion-ios-heart-outline:before,.ion-ios-heart:before,.ion-ios-help-empty:before,.ion-ios-help-outline:before,.ion-ios-help:before,.ion-ios-home-outline:before,.ion-ios-home:before,.ion-ios-infinite-outline:before,.ion-ios-infinite:before,.ion-ios-information-empty:before,.ion-ios-information-outline:before,.ion-ios-information:before,.ion-ios-ionic-outline:before,.ion-ios-keypad-outline:before,.ion-ios-keypad:before,.ion-ios-lightbulb-outline:before,.ion-ios-lightbulb:before,.ion-ios-list-outline:before,.ion-ios-list:before,.ion-ios-location-outline:before,.ion-ios-location:before,.ion-ios-locked-outline:before,.ion-ios-locked:before,.ion-ios-loop-strong:before,.ion-ios-loop:before,.ion-ios-medical-outline:before,.ion-ios-medical:before,.ion-ios-medkit-outline:before,.ion-ios-medkit:before,.ion-ios-mic-off:before,.ion-ios-mic-outline:before,.ion-ios-mic:before,.ion-ios-minus-empty:before,.ion-ios-minus-outline:before,.ion-ios-minus:before,.ion-ios-monitor-outline:before,.ion-ios-monitor:before,.ion-ios-moon-outline:before,.ion-ios-moon:before,.
ion-ios-more-outline:before,.ion-ios-more:before,.ion-ios-musical-note:before,.ion-ios-musical-notes:before,.ion-ios-navigate-outline:before,.ion-ios-navigate:before,.ion-ios-nutrition-outline:before,.ion-ios-nutrition:before,.ion-ios-paper-outline:before,.ion-ios-paper:before,.ion-ios-paperplane-outline:before,.ion-ios-paperplane:before,.ion-ios-partlysunny-outline:before,.ion-ios-partlysunny:before,.ion-ios-pause-outline:before,.ion-ios-pause:before,.ion-ios-paw-outline:before,.ion-ios-paw:before,.ion-ios-people-outline:before,.ion-ios-people:before,.ion-ios-person-outline:before,.ion-ios-person:before,.ion-ios-personadd-outline:before,.ion-ios-personadd:before,.ion-ios-photos-outline:before,.ion-ios-photos:before,.ion-ios-pie-outline:before,.ion-ios-pie:before,.ion-ios-pint-outline:before,.ion-ios-pint:before,.ion-ios-play-outline:before,.ion-ios-play:before,.ion-ios-plus-empty:before,.ion-ios-plus-outline:before,.ion-ios-plus:before,.ion-ios-pricetag-outline:before,.ion-ios-pricetag:before,.ion-ios-pricetags-outline:before,.ion-ios-pricetags:before,.ion-ios-printer-outline:before,.ion-ios-printer:before,.ion-ios-pulse-strong:before,.ion-ios-pulse:before,.ion-ios-rainy-outline:before,.ion-ios-rainy:before,.ion-ios-recording-outline:before,.ion-ios-recording:before,.ion-ios-redo-outline:before,.ion-ios-redo:before,.ion-ios-refresh-empty:before,.ion-ios-refresh-outline:before,.ion-ios-refresh:before,.ion-ios-reload:before,.ion-ios-reverse-camera-outline:before,.ion-ios-reverse-camera:before,.ion-ios-rewind-outline:before,.ion-ios-rewind:before,.ion-ios-rose-outline:before,.ion-ios-rose:before,.ion-ios-search-strong:before,.ion-ios-search:before,.ion-ios-settings-strong:before,.ion-ios-settings:before,.ion-ios-shuffle-strong:before,.ion-ios-shuffle:before,.ion-ios-skipbackward-outline:before,.ion-ios-skipbackward:before,.ion-ios-skipforward-outline:before,.ion-ios-skipforward:before,.ion-ios-snowy:before,.ion-ios-speedometer-outline:before,.ion-ios-speedometer:befor
e,.ion-ios-star-half:before,.ion-ios-star-outline:before,.ion-ios-star:before,.ion-ios-stopwatch-outline:before,.ion-ios-stopwatch:before,.ion-ios-sunny-outline:before,.ion-ios-sunny:before,.ion-ios-telephone-outline:before,.ion-ios-telephone:before,.ion-ios-tennisball-outline:before,.ion-ios-tennisball:before,.ion-ios-thunderstorm-outline:before,.ion-ios-thunderstorm:before,.ion-ios-time-outline:before,.ion-ios-time:before,.ion-ios-timer-outline:before,.ion-ios-timer:before,.ion-ios-toggle-outline:before,.ion-ios-toggle:before,.ion-ios-trash-outline:before,.ion-ios-trash:before,.ion-ios-undo-outline:before,.ion-ios-undo:before,.ion-ios-unlocked-outline:before,.ion-ios-unlocked:before,.ion-ios-upload-outline:before,.ion-ios-upload:before,.ion-ios-videocam-outline:before,.ion-ios-videocam:before,.ion-ios-volume-high:before,.ion-ios-volume-low:before,.ion-ios-wineglass-outline:before,.ion-ios-wineglass:before,.ion-ios-world-outline:before,.ion-ios-world:before,.ion-ipad:before,.ion-iphone:before,.ion-ipod:before,.ion-jet:before,.ion-key:before,.ion-knife:before,.ion-laptop:before,.ion-leaf:before,.ion-levels:before,.ion-lightbulb:before,.ion-link:before,.ion-load-a:before,.ion-load-b:before,.ion-load-c:before,.ion-load-d:before,.ion-location:before,.ion-lock-combination:before,.ion-locked:before,.ion-log-in:before,.ion-log-out:before,.ion-loop:before,.ion-magnet:before,.ion-male:before,.ion-man:before,.ion-map:before,.ion-medkit:before,.ion-merge:before,.ion-mic-a:before,.ion-mic-b:before,.ion-mic-c:before,.ion-minus-circled:before,.ion-minus-round:before,.ion-minus:before,.ion-model-s:before,.ion-monitor:before,.ion-more:before,.ion-mouse:before,.ion-music-note:before,.ion-navicon-round:before,.ion-navicon:before,.ion-navigate:before,.ion-network:before,.ion-no-smoking:before,.ion-nuclear:before,.ion-outlet:before,.ion-paintbrush:before,.ion-paintbucket:before,.ion-paper-airplane:before,.ion-paperclip:before,.ion-pause:before,.ion-person-add:before,.ion-person-stalke
r:before,.ion-person:before,.ion-pie-graph:before,.ion-pin:before,.ion-pinpoint:before,.ion-pizza:before,.ion-plane:before,.ion-planet:before,.ion-play:before,.ion-playstation:before,.ion-plus-circled:before,.ion-plus-round:before,.ion-plus:before,.ion-podium:before,.ion-pound:before,.ion-power:before,.ion-pricetag:before,.ion-pricetags:before,.ion-printer:before,.ion-pull-request:before,.ion-qr-scanner:before,.ion-quote:before,.ion-radio-waves:before,.ion-record:before,.ion-refresh:before,.ion-reply-all:before,.ion-reply:before,.ion-ribbon-a:before,.ion-ribbon-b:before,.ion-sad-outline:before,.ion-sad:before,.ion-scissors:before,.ion-search:before,.ion-settings:before,.ion-share:before,.ion-shuffle:before,.ion-skip-backward:before,.ion-skip-forward:before,.ion-social-android-outline:before,.ion-social-android:before,.ion-social-angular-outline:before,.ion-social-angular:before,.ion-social-apple-outline:before,.ion-social-apple:before,.ion-social-bitcoin-outline:before,.ion-social-bitcoin:before,.ion-social-buffer-outline:before,.ion-social-buffer:before,.ion-social-chrome-outline:before,.ion-social-chrome:before,.ion-social-codepen-outline:before,.ion-social-codepen:before,.ion-social-css3-outline:before,.ion-social-css3:before,.ion-social-designernews-outline:before,.ion-social-designernews:before,.ion-social-dribbble-outline:before,.ion-social-dribbble:before,.ion-social-dropbox-outline:before,.ion-social-dropbox:before,.ion-social-euro-outline:before,.ion-social-euro:before,.ion-social-facebook-outline:before,.ion-social-facebook:before,.ion-social-foursquare-outline:before,.ion-social-foursquare:before,.ion-social-freebsd-devil:before,.ion-social-github-outline:before,.ion-social-github:before,.ion-social-google-outline:before,.ion-social-google:before,.ion-social-googleplus-outline:before,.ion-social-googleplus:before,.ion-social-hackernews-outline:before,.ion-social-hackernews:before,.ion-social-html5-outline:before,.ion-social-html5:before,.ion-social-instag
ram-outline:before,.ion-social-instagram:before,.ion-social-javascript-outline:before,.ion-social-javascript:before,.ion-social-linkedin-outline:before,.ion-social-linkedin:before,.ion-social-markdown:before,.ion-social-nodejs:before,.ion-social-octocat:before,.ion-social-pinterest-outline:before,.ion-social-pinterest:before,.ion-social-python:before,.ion-social-reddit-outline:before,.ion-social-reddit:before,.ion-social-rss-outline:before,.ion-social-rss:before,.ion-social-sass:before,.ion-social-skype-outline:before,.ion-social-skype:before,.ion-social-snapchat-outline:before,.ion-social-snapchat:before,.ion-social-tumblr-outline:before,.ion-social-tumblr:before,.ion-social-tux:before,.ion-social-twitch-outline:before,.ion-social-twitch:before,.ion-social-twitter-outline:before,.ion-social-twitter:before,.ion-social-usd-outline:before,.ion-social-usd:before,.ion-social-vimeo-outline:before,.ion-social-vimeo:before,.ion-social-whatsapp-outline:before,.ion-social-whatsapp:before,.ion-social-windows-outline:before,.ion-social-windows:before,.ion-social-wordpress-outline:before,.ion-social-wordpress:before,.ion-social-yahoo-outline:before,.ion-social-yahoo:before,.ion-social-yen-outline:before,.ion-social-yen:before,.ion-social-youtube-outline:before,.ion-social-youtube:before,.ion-soup-can-outline:before,.ion-soup-can:before,.ion-speakerphone:before,.ion-speedometer:before,.ion-spoon:before,.ion-star:before,.ion-stats-bars:before,.ion-steam:before,.ion-stop:before,.ion-thermometer:before,.ion-thumbsdown:before,.ion-thumbsup:before,.ion-toggle-filled:before,.ion-toggle:before,.ion-transgender:before,.ion-trash-a:before,.ion-trash-b:before,.ion-trophy:before,.ion-tshirt-outline:before,.ion-tshirt:before,.ion-umbrella:before,.ion-university:before,.ion-unlocked:before,.ion-upload:before,.ion-usb:before,.ion-videocamera:before,.ion-volume-high:before,.ion-volume-low:before,.ion-volume-medium:before,.ion-volume-mute:before,.ion-wand:before,.ion-waterdrop:before,.ion-wifi:
before,.ion-wineglass:before,.ion-woman:before,.ion-wrench:before,.ion-xbox:before,.ionicons{display:inline-block;font-family:Ionicons;speak:none;font-style:normal;font-weight:400;font-variant:normal;text-transform:none;text-rendering:auto;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.ion-alert:before{content:""}.ion-alert-circled:before{content:""}.ion-android-add:before{content:""}.ion-android-add-circle:before{content:""}.ion-android-alarm-clock:before{content:""}.ion-android-alert:before{content:""}.ion-android-apps:before{content:""}.ion-android-archive:before{content:""}.ion-android-arrow-back:before{content:""}.ion-android-arrow-down:before{content:""}.ion-android-arrow-dropdown:before{content:""}.ion-android-arrow-dropdown-circle:before{content:""}.ion-android-arrow-dropleft:before{content:""}.ion-android-arrow-dropleft-circle:before{content:""}.ion-android-arrow-dropright:before{content:""}.ion-android-arrow-dropright-circle:before{content:""}.ion-android-arrow-dropup:before{content:""}.ion-android-arrow-dropup-circle:before{content:""}.ion-android-arrow-forward:before{content:""}.ion-android-arrow-up:before{content:""}.ion-android-attach:before{content:""}.ion-android-bar:before{content:""}.ion-android-bicycle:before{content:""}.ion-android-boat:before{content:""}.ion-android-bookmark:before{content:""}.ion-android-bulb:before{content:""}.ion-android-bus:before{content:""}.ion-android-calendar:before{content:""}.ion-android-call:before{content:""}.ion-android-camera:before{content:""}.ion-android-cancel:before{content:""}.ion-android-car:before{content:""}.ion-android-cart:before{content:""}.ion-android-chat:before{content:""}.ion-android-checkbox:before{content:""}.ion-android-checkbox-blank:before{content:""}.ion-android-checkbox-outline:before{content:""}.ion-android-checkbox-outline-blank:before{content:""}.ion-android-checkmark-circle:before{content:""}.ion-android-clipboard:
before{content:""}.ion-android-close:before{content:""}.ion-android-cloud:before{content:""}.ion-android-cloud-circle:before{content:""}.ion-android-cloud-done:before{content:""}.ion-android-cloud-outline:before{content:""}.ion-android-color-palette:before{content:""}.ion-android-compass:before{content:""}.ion-android-contact:before{content:""}.ion-android-contacts:before{content:""}.ion-android-contract:before{content:""}.ion-android-create:before{content:""}.ion-android-delete:before{content:""}.ion-android-desktop:before{content:""}.ion-android-document:before{content:""}.ion-android-done:before{content:""}.ion-android-done-all:before{content:""}.ion-android-download:before{content:""}.ion-android-drafts:before{content:""}.ion-android-exit:before{content:""}.ion-android-expand:before{content:""}.ion-android-favorite:before{content:""}.ion-android-favorite-outline:before{content:""}.ion-android-film:before{content:""}.ion-android-folder:before{content:""}.ion-android-folder-open:before{content:""}.ion-android-funnel:before{content:""}.ion-android-globe:before{content:""}.ion-android-hand:before{content:""}.ion-android-hangout:before{content:""}.ion-android-happy:before{content:""}.ion-android-home:before{content:""}.ion-android-image:before{content:""}.ion-android-laptop:before{content:""}.ion-android-list:before{content:""}.ion-android-locate:before{content:""}.ion-android-lock:before{content:""}.ion-android-mail:before{content:""}.ion-android-map:before{content:""}.ion-android-menu:before{content:""}.ion-android-microphone:before{content:""}.ion-android-microphone-off:before{content:""}.ion-android-more-horizontal:before{content:""}.ion-android-more-vertical:before{content:""}.ion-android-navigate:before{content:""}.ion-android-notifications:before{content:""}.ion-android-notifications-none:before{content:""}.ion-android-notifications-off:before{content:""}.ion-android-open:before{content:""}.ion-android-options:b
efore{content:""}.ion-android-people:before{content:""}.ion-android-person:before{content:""}.ion-android-person-add:before{content:""}.ion-android-phone-landscape:before{content:""}.ion-android-phone-portrait:before{content:""}.ion-android-pin:before{content:""}.ion-android-plane:before{content:""}.ion-android-playstore:before{content:""}.ion-android-print:before{content:""}.ion-android-radio-button-off:before{content:""}.ion-android-radio-button-on:before{content:""}.ion-android-refresh:before{content:""}.ion-android-remove:before{content:""}.ion-android-remove-circle:before{content:""}.ion-android-restaurant:before{content:""}.ion-android-sad:before{content:""}.ion-android-search:before{content:""}.ion-android-send:before{content:""}.ion-android-settings:before{content:""}.ion-android-share:before{content:""}.ion-android-share-alt:before{content:""}.ion-android-star:before{content:""}.ion-android-star-half:before{content:""}.ion-android-star-outline:before{content:""}.ion-android-stopwatch:before{content:""}.ion-android-subway:before{content:""}.ion-android-sunny:before{content:""}.ion-android-sync:before{content:""}.ion-android-textsms:before{content:""}.ion-android-time:before{content:""}.ion-android-train:before{content:""}.ion-android-unlock:before{content:""}.ion-android-upload:before{content:""}.ion-android-volume-down:before{content:""}.ion-android-volume-mute:before{content:""}.ion-android-volume-off:before{content:""}.ion-android-volume-up:before{content:""}.ion-android-walk:before{content:""}.ion-android-warning:before{content:""}.ion-android-watch:before{content:""}.ion-android-wifi:before{content:""}.ion-aperture:before{content:""}.ion-archive:before{content:""}.ion-arrow-down-a:before{content:""}.ion-arrow-down-b:before{content:""}.ion-arrow-down-c:before{content:""}.ion-arrow-expand:before{content:""}.ion-arrow-graph-down-left:before{content:""}.ion-arrow-graph-down-right:before{content:""}.ion-arrow
-graph-up-left:before{content:""}.ion-arrow-graph-up-right:before{content:""}.ion-arrow-left-a:before{content:""}.ion-arrow-left-b:before{content:""}.ion-arrow-left-c:before{content:""}.ion-arrow-move:before{content:""}.ion-arrow-resize:before{content:""}.ion-arrow-return-left:before{content:""}.ion-arrow-return-right:before{content:""}.ion-arrow-right-a:before{content:""}.ion-arrow-right-b:before{content:""}.ion-arrow-right-c:before{content:""}.ion-arrow-shrink:before{content:""}.ion-arrow-swap:before{content:""}.ion-arrow-up-a:before{content:""}.ion-arrow-up-b:before{content:""}.ion-arrow-up-c:before{content:""}.ion-asterisk:before{content:""}.ion-at:before{content:""}.ion-backspace:before{content:""}.ion-backspace-outline:before{content:""}.ion-bag:before{content:""}.ion-battery-charging:before{content:""}.ion-battery-empty:before{content:""}.ion-battery-full:before{content:""}.ion-battery-half:before{content:""}.ion-battery-low:before{content:""}.ion-beaker:before{content:""}.ion-beer:before{content:""}.ion-bluetooth:before{content:""}.ion-bonfire:before{content:""}.ion-bookmark:before{content:""}.ion-bowtie:before{content:""}.ion-briefcase:before{content:""}.ion-bug:before{content:""}.ion-calculator:before{content:""}.ion-calendar:before{content:""}.ion-camera:before{content:""}.ion-card:before{content:""}.ion-cash:before{content:""}.ion-chatbox:before{content:""}.ion-chatbox-working:before{content:""}.ion-chatboxes:before{content:""}.ion-chatbubble:before{content:""}.ion-chatbubble-working:before{content:""}.ion-chatbubbles:before{content:""}.ion-checkmark:before{content:""}.ion-checkmark-circled:before{content:""}.ion-checkmark-round:before{content:""}.ion-chevron-down:before{content:""}.ion-chevron-left:before{content:""}.ion-chevron-right:before{content:""}.ion-chevron-up:before{content:""}.ion-clipboard:before{content:""}.ion-clock:before{content:""}.ion-close:before{content:""}.ion-close-circled:befo
re{content:""}.ion-close-round:before{content:""}.ion-closed-captioning:before{content:""}.ion-cloud:before{content:""}.ion-code:before{content:""}.ion-code-download:before{content:""}.ion-code-working:before{content:""}.ion-coffee:before{content:""}.ion-compass:before{content:""}.ion-compose:before{content:""}.ion-connection-bars:before{content:""}.ion-contrast:before{content:""}.ion-crop:before{content:""}.ion-cube:before{content:""}.ion-disc:before{content:""}.ion-document:before{content:""}.ion-document-text:before{content:""}.ion-drag:before{content:""}.ion-earth:before{content:""}.ion-easel:before{content:""}.ion-edit:before{content:""}.ion-egg:before{content:""}.ion-eject:before{content:""}.ion-email:before{content:""}.ion-email-unread:before{content:""}.ion-erlenmeyer-flask:before{content:""}.ion-erlenmeyer-flask-bubbles:before{content:""}.ion-eye:before{content:""}.ion-eye-disabled:before{content:""}.ion-female:before{content:""}.ion-filing:before{content:""}.ion-film-marker:before{content:""}.ion-fireball:before{content:""}.ion-flag:before{content:""}.ion-flame:before{content:""}.ion-flash:before{content:""}.ion-flash-off:before{content:""}.ion-folder:before{content:""}.ion-fork:before{content:""}.ion-fork-repo:before{content:""}.ion-forward:before{content:""}.ion-funnel:before{content:""}.ion-gear-a:before{content:""}.ion-gear-b:before{content:""}.ion-grid:before{content:""}.ion-hammer:before{content:""}.ion-happy:before{content:""}.ion-happy-outline:before{content:""}.ion-headphone:before{content:""}.ion-heart:before{content:""}.ion-heart-broken:before{content:""}.ion-help:before{content:""}.ion-help-buoy:before{content:""}.ion-help-circled:before{content:""}.ion-home:before{content:""}.ion-icecream:before{content:""}.ion-image:before{content:""}.ion-images:before{content:""}.ion-information:before{content:""}.ion-information-circled:before{content:""}.ion-ionic:before{content:""}.ion-ios-alarm
:before{content:""}.ion-ios-alarm-outline:before{content:""}.ion-ios-albums:before{content:""}.ion-ios-albums-outline:before{content:""}.ion-ios-americanfootball:before{content:""}.ion-ios-americanfootball-outline:before{content:""}.ion-ios-analytics:before{content:""}.ion-ios-analytics-outline:before{content:""}.ion-ios-arrow-back:before{content:""}.ion-ios-arrow-down:before{content:""}.ion-ios-arrow-forward:before{content:""}.ion-ios-arrow-left:before{content:""}.ion-ios-arrow-right:before{content:""}.ion-ios-arrow-thin-down:before{content:""}.ion-ios-arrow-thin-left:before{content:""}.ion-ios-arrow-thin-right:before{content:""}.ion-ios-arrow-thin-up:before{content:""}.ion-ios-arrow-up:before{content:""}.ion-ios-at:before{content:""}.ion-ios-at-outline:before{content:""}.ion-ios-barcode:before{content:""}.ion-ios-barcode-outline:before{content:""}.ion-ios-baseball:before{content:""}.ion-ios-baseball-outline:before{content:""}.ion-ios-basketball:before{content:""}.ion-ios-basketball-outline:before{content:""}.ion-ios-bell:before{content:""}.ion-ios-bell-outline:before{content:""}.ion-ios-body:before{content:""}.ion-ios-body-outline:before{content:""}.ion-ios-bolt:before{content:""}.ion-ios-bolt-outline:before{content:""}.ion-ios-book:before{content:""}.ion-ios-book-outline:before{content:""}.ion-ios-bookmarks:before{content:""}.ion-ios-bookmarks-outline:before{content:""}.ion-ios-box:before{content:""}.ion-ios-box-outline:before{content:""}.ion-ios-briefcase:before{content:""}.ion-ios-briefcase-outline:before{content:""}.ion-ios-browsers:before{content:""}.ion-ios-browsers-outline:before{content:""}.ion-ios-calculator:before{content:""}.ion-ios-calculator-outline:before{content:""}.ion-ios-calendar:before{content:""}.ion-ios-calendar-outline:before{content:""}.ion-ios-camera:before{content:""}.ion-ios-camera-outline:before{content:""}.ion-ios-cart:before{content:""}.ion-ios-cart-outline:before{content:""}.ion-ios-ch
atboxes:before{content:""}.ion-ios-chatboxes-outline:before{content:""}.ion-ios-chatbubble:before{content:""}.ion-ios-chatbubble-outline:before{content:""}.ion-ios-checkmark:before{content:""}.ion-ios-checkmark-empty:before{content:""}.ion-ios-checkmark-outline:before{content:""}.ion-ios-circle-filled:before{content:""}.ion-ios-circle-outline:before{content:""}.ion-ios-clock:before{content:""}.ion-ios-clock-outline:before{content:""}.ion-ios-close:before{content:""}.ion-ios-close-empty:before{content:""}.ion-ios-close-outline:before{content:""}.ion-ios-cloud:before{content:""}.ion-ios-cloud-download:before{content:""}.ion-ios-cloud-download-outline:before{content:""}.ion-ios-cloud-outline:before{content:""}.ion-ios-cloud-upload:before{content:""}.ion-ios-cloud-upload-outline:before{content:""}.ion-ios-cloudy:before{content:""}.ion-ios-cloudy-night:before{content:""}.ion-ios-cloudy-night-outline:before{content:""}.ion-ios-cloudy-outline:before{content:""}.ion-ios-cog:before{content:""}.ion-ios-cog-outline:before{content:""}.ion-ios-color-filter:before{content:""}.ion-ios-color-filter-outline:before{content:""}.ion-ios-color-wand:before{content:""}.ion-ios-color-wand-outline:before{content:""}.ion-ios-compose:before{content:""}.ion-ios-compose-outline:before{content:""}.ion-ios-contact:before{content:""}.ion-ios-contact-outline:before{content:""}.ion-ios-copy:before{content:""}.ion-ios-copy-outline:before{content:""}.ion-ios-crop:before{content:""}.ion-ios-crop-strong:before{content:""}.ion-ios-download:before{content:""}.ion-ios-download-outline:before{content:""}.ion-ios-drag:before{content:""}.ion-ios-email:before{content:""}.ion-ios-email-outline:before{content:""}.ion-ios-eye:before{content:""}.ion-ios-eye-outline:before{content:""}.ion-ios-fastforward:before{content:""}.ion-ios-fastforward-outline:before{content:""}.ion-ios-filing:before{content:""}.ion-ios-filing-outline:before{content:""}.ion-ios-film:before{con
tent:""}.ion-ios-film-outline:before{content:""}.ion-ios-flag:before{content:""}.ion-ios-flag-outline:before{content:""}.ion-ios-flame:before{content:""}.ion-ios-flame-outline:before{content:""}.ion-ios-flask:before{content:""}.ion-ios-flask-outline:before{content:""}.ion-ios-flower:before{content:""}.ion-ios-flower-outline:before{content:""}.ion-ios-folder:before{content:""}.ion-ios-folder-outline:before{content:""}.ion-ios-football:before{content:""}.ion-ios-football-outline:before{content:""}.ion-ios-game-controller-a:before{content:""}.ion-ios-game-controller-a-outline:before{content:""}.ion-ios-game-controller-b:before{content:""}.ion-ios-game-controller-b-outline:before{content:""}.ion-ios-gear:before{content:""}.ion-ios-gear-outline:before{content:""}.ion-ios-glasses:before{content:""}.ion-ios-glasses-outline:before{content:""}.ion-ios-grid-view:before{content:""}.ion-ios-grid-view-outline:before{content:""}.ion-ios-heart:before{content:""}.ion-ios-heart-outline:before{content:""}.ion-ios-help:before{content:""}.ion-ios-help-empty:before{content:""}.ion-ios-help-outline:before{content:""}.ion-ios-home:before{content:""}.ion-ios-home-outline:before{content:""}.ion-ios-infinite:before{content:""}.ion-ios-infinite-outline:before{content:""}.ion-ios-information:before{content:""}.ion-ios-information-empty:before{content:""}.ion-ios-information-outline:before{content:""}.ion-ios-ionic-outline:before{content:""}.ion-ios-keypad:before{content:""}.ion-ios-keypad-outline:before{content:""}.ion-ios-lightbulb:before{content:""}.ion-ios-lightbulb-outline:before{content:""}.ion-ios-list:before{content:""}.ion-ios-list-outline:before{content:""}.ion-ios-location:before{content:""}.ion-ios-location-outline:before{content:""}.ion-ios-locked:before{content:""}.ion-ios-locked-outline:before{content:""}.ion-ios-loop:before{content:""}.ion-ios-loop-strong:before{content:""}.ion-ios-medical:before{content:""}.ion-ios-medical-outli
ne:before{content:""}.ion-ios-medkit:before{content:""}.ion-ios-medkit-outline:before{content:""}.ion-ios-mic:before{content:""}.ion-ios-mic-off:before{content:""}.ion-ios-mic-outline:before{content:""}.ion-ios-minus:before{content:""}.ion-ios-minus-empty:before{content:""}.ion-ios-minus-outline:before{content:""}.ion-ios-monitor:before{content:""}.ion-ios-monitor-outline:before{content:""}.ion-ios-moon:before{content:""}.ion-ios-moon-outline:before{content:""}.ion-ios-more:before{content:""}.ion-ios-more-outline:before{content:""}.ion-ios-musical-note:before{content:""}.ion-ios-musical-notes:before{content:""}.ion-ios-navigate:before{content:""}.ion-ios-navigate-outline:before{content:""}.ion-ios-nutrition:before{content:""}.ion-ios-nutrition-outline:before{content:""}.ion-ios-paper:before{content:""}.ion-ios-paper-outline:before{content:""}.ion-ios-paperplane:before{content:""}.ion-ios-paperplane-outline:before{content:""}.ion-ios-partlysunny:before{content:""}.ion-ios-partlysunny-outline:before{content:""}.ion-ios-pause:before{content:""}.ion-ios-pause-outline:before{content:""}.ion-ios-paw:before{content:""}.ion-ios-paw-outline:before{content:""}.ion-ios-people:before{content:""}.ion-ios-people-outline:before{content:""}.ion-ios-person:before{content:""}.ion-ios-person-outline:before{content:""}.ion-ios-personadd:before{content:""}.ion-ios-personadd-outline:before{content:""}.ion-ios-photos:before{content:""}.ion-ios-photos-outline:before{content:""}.ion-ios-pie:before{content:""}.ion-ios-pie-outline:before{content:""}.ion-ios-pint:before{content:""}.ion-ios-pint-outline:before{content:""}.ion-ios-play:before{content:""}.ion-ios-play-outline:before{content:""}.ion-ios-plus:before{content:""}.ion-ios-plus-empty:before{content:""}.ion-ios-plus-outline:before{content:""}.ion-ios-pricetag:before{content:""}.ion-ios-pricetag-outline:before{content:""}.ion-ios-pricetags:before{content:""}.ion-ios-pricetags-outline:bef
ore{content:""}.ion-ios-printer:before{content:""}.ion-ios-printer-outline:before{content:""}.ion-ios-pulse:before{content:""}.ion-ios-pulse-strong:before{content:""}.ion-ios-rainy:before{content:""}.ion-ios-rainy-outline:before{content:""}.ion-ios-recording:before{content:""}.ion-ios-recording-outline:before{content:""}.ion-ios-redo:before{content:""}.ion-ios-redo-outline:before{content:""}.ion-ios-refresh:before{content:""}.ion-ios-refresh-empty:before{content:""}.ion-ios-refresh-outline:before{content:""}.ion-ios-reload:before{content:""}.ion-ios-reverse-camera:before{content:""}.ion-ios-reverse-camera-outline:before{content:""}.ion-ios-rewind:before{content:""}.ion-ios-rewind-outline:before{content:""}.ion-ios-rose:before{content:""}.ion-ios-rose-outline:before{content:""}.ion-ios-search:before{content:""}.ion-ios-search-strong:before{content:""}.ion-ios-settings:before{content:""}.ion-ios-settings-strong:before{content:""}.ion-ios-shuffle:before{content:""}.ion-ios-shuffle-strong:before{content:""}.ion-ios-skipbackward:before{content:""}.ion-ios-skipbackward-outline:before{content:""}.ion-ios-skipforward:before{content:""}.ion-ios-skipforward-outline:before{content:""}.ion-ios-snowy:before{content:""}.ion-ios-speedometer:before{content:""}.ion-ios-speedometer-outline:before{content:""}.ion-ios-star:before{content:""}.ion-ios-star-half:before{content:""}.ion-ios-star-outline:before{content:""}.ion-ios-stopwatch:before{content:""}.ion-ios-stopwatch-outline:before{content:""}.ion-ios-sunny:before{content:""}.ion-ios-sunny-outline:before{content:""}.ion-ios-telephone:before{content:""}.ion-ios-telephone-outline:before{content:""}.ion-ios-tennisball:before{content:""}.ion-ios-tennisball-outline:before{content:""}.ion-ios-thunderstorm:before{content:""}.ion-ios-thunderstorm-outline:before{content:""}.ion-ios-time:before{content:""}.ion-ios-time-outline:before{content:""}.ion-ios-timer:before{content:""}.ion-ios-timer-
outline:before{content:""}.ion-ios-toggle:before{content:""}.ion-ios-toggle-outline:before{content:""}.ion-ios-trash:before{content:""}.ion-ios-trash-outline:before{content:""}.ion-ios-undo:before{content:""}.ion-ios-undo-outline:before{content:""}.ion-ios-unlocked:before{content:""}.ion-ios-unlocked-outline:before{content:""}.ion-ios-upload:before{content:""}.ion-ios-upload-outline:before{content:""}.ion-ios-videocam:before{content:""}.ion-ios-videocam-outline:before{content:""}.ion-ios-volume-high:before{content:""}.ion-ios-volume-low:before{content:""}.ion-ios-wineglass:before{content:""}.ion-ios-wineglass-outline:before{content:""}.ion-ios-world:before{content:""}.ion-ios-world-outline:before{content:""}.ion-ipad:before{content:""}.ion-iphone:before{content:""}.ion-ipod:before{content:""}.ion-jet:before{content:""}.ion-key:before{content:""}.ion-knife:before{content:""}.ion-laptop:before{content:""}.ion-leaf:before{content:""}.ion-levels:before{content:""}.ion-lightbulb:before{content:""}.ion-link:before{content:""}.ion-load-a:before{content:""}.ion-load-b:before{content:""}.ion-load-c:before{content:""}.ion-load-d:before{content:""}.ion-location:before{content:""}.ion-lock-combination:before{content:""}.ion-locked:before{content:""}.ion-log-in:before{content:""}.ion-log-out:before{content:""}.ion-loop:before{content:""}.ion-magnet:before{content:""}.ion-male:before{content:""}.ion-man:before{content:""}.ion-map:before{content:""}.ion-medkit:before{content:""}.ion-merge:before{content:""}.ion-mic-a:before{content:""}.ion-mic-b:before{content:""}.ion-mic-c:before{content:""}.ion-minus:before{content:""}.ion-minus-circled:before{content:""}.ion-minus-round:before{content:""}.ion-model-s:before{content:""}.ion-monitor:before{content:""}.ion-more:before{content:""}.ion-mouse:before{content:""}.ion-music-note:before{content:""}.ion-navicon:before{content:""}.ion-navicon-round:before{content:""}.ion-navigate:b
efore{content:""}.ion-network:before{content:""}.ion-no-smoking:before{content:""}.ion-nuclear:before{content:""}.ion-outlet:before{content:""}.ion-paintbrush:before{content:""}.ion-paintbucket:before{content:""}.ion-paper-airplane:before{content:""}.ion-paperclip:before{content:""}.ion-pause:before{content:""}.ion-person:before{content:""}.ion-person-add:before{content:""}.ion-person-stalker:before{content:""}.ion-pie-graph:before{content:""}.ion-pin:before{content:""}.ion-pinpoint:before{content:""}.ion-pizza:before{content:""}.ion-plane:before{content:""}.ion-planet:before{content:""}.ion-play:before{content:""}.ion-playstation:before{content:""}.ion-plus:before{content:""}.ion-plus-circled:before{content:""}.ion-plus-round:before{content:""}.ion-podium:before{content:""}.ion-pound:before{content:""}.ion-power:before{content:""}.ion-pricetag:before{content:""}.ion-pricetags:before{content:""}.ion-printer:before{content:""}.ion-pull-request:before{content:""}.ion-qr-scanner:before{content:""}.ion-quote:before{content:""}.ion-radio-waves:before{content:""}.ion-record:before{content:""}.ion-refresh:before{content:""}.ion-reply:before{content:""}.ion-reply-all:before{content:""}.ion-ribbon-a:before{content:""}.ion-ribbon-b:before{content:""}.ion-sad:before{content:""}.ion-sad-outline:before{content:""}.ion-scissors:before{content:""}.ion-search:before{content:""}.ion-settings:before{content:""}.ion-share:before{content:""}.ion-shuffle:before{content:""}.ion-skip-backward:before{content:""}.ion-skip-forward:before{content:""}.ion-social-android:before{content:""}.ion-social-android-outline:before{content:""}.ion-social-angular:before{content:""}.ion-social-angular-outline:before{content:""}.ion-social-apple:before{content:""}.ion-social-apple-outline:before{content:""}.ion-social-bitcoin:before{content:""}.ion-social-bitcoin-outline:before{content:""}.ion-social-buffer:before{content:""}.ion-social-buffer-outli
ne:before{content:""}.ion-social-chrome:before{content:""}.ion-social-chrome-outline:before{content:""}.ion-social-codepen:before{content:""}.ion-social-codepen-outline:before{content:""}.ion-social-css3:before{content:""}.ion-social-css3-outline:before{content:""}.ion-social-designernews:before{content:""}.ion-social-designernews-outline:before{content:""}.ion-social-dribbble:before{content:""}.ion-social-dribbble-outline:before{content:""}.ion-social-dropbox:before{content:""}.ion-social-dropbox-outline:before{content:""}.ion-social-euro:before{content:""}.ion-social-euro-outline:before{content:""}.ion-social-facebook:before{content:""}.ion-social-facebook-outline:before{content:""}.ion-social-foursquare:before{content:""}.ion-social-foursquare-outline:before{content:""}.ion-social-freebsd-devil:before{content:""}.ion-social-github:before{content:""}.ion-social-github-outline:before{content:""}.ion-social-google:before{content:""}.ion-social-google-outline:before{content:""}.ion-social-googleplus:before{content:""}.ion-social-googleplus-outline:before{content:""}.ion-social-hackernews:before{content:""}.ion-social-hackernews-outline:before{content:""}.ion-social-html5:before{content:""}.ion-social-html5-outline:before{content:""}.ion-social-instagram:before{content:""}.ion-social-instagram-outline:before{content:""}.ion-social-javascript:before{content:""}.ion-social-javascript-outline:before{content:""}.ion-social-linkedin:before{content:""}.ion-social-linkedin-outline:before{content:""}.ion-social-markdown:before{content:""}.ion-social-nodejs:before{content:""}.ion-social-octocat:before{content:""}.ion-social-pinterest:before{content:""}.ion-social-pinterest-outline:before{content:""}.ion-social-python:before{content:""}.ion-social-reddit:before{content:""}.ion-social-reddit-outline:before{content:""}.ion-social-rss:before{content:""}.ion-social-rss-outline:before{content:""}.ion-social-sass:before{content:""}.ion-s
ocial-skype:before{content:""}.ion-social-skype-outline:before{content:""}.ion-social-snapchat:before{content:""}.ion-social-snapchat-outline:before{content:""}.ion-social-tumblr:before{content:""}.ion-social-tumblr-outline:before{content:""}.ion-social-tux:before{content:""}.ion-social-twitch:before{content:""}.ion-social-twitch-outline:before{content:""}.ion-social-twitter:before{content:""}.ion-social-twitter-outline:before{content:""}.ion-social-usd:before{content:""}.ion-social-usd-outline:before{content:""}.ion-social-vimeo:before{content:""}.ion-social-vimeo-outline:before{content:""}.ion-social-whatsapp:before{content:""}.ion-social-whatsapp-outline:before{content:""}.ion-social-windows:before{content:""}.ion-social-windows-outline:before{content:""}.ion-social-wordpress:before{content:""}.ion-social-wordpress-outline:before{content:""}.ion-social-yahoo:before{content:""}.ion-social-yahoo-outline:before{content:""}.ion-social-yen:before{content:""}.ion-social-yen-outline:before{content:""}.ion-social-youtube:before{content:""}.ion-social-youtube-outline:before{content:""}.ion-soup-can:before{content:""}.ion-soup-can-outline:before{content:""}.ion-speakerphone:before{content:""}.ion-speedometer:before{content:""}.ion-spoon:before{content:""}.ion-star:before{content:""}.ion-stats-bars:before{content:""}.ion-steam:before{content:""}.ion-stop:before{content:""}.ion-thermometer:before{content:""}.ion-thumbsdown:before{content:""}.ion-thumbsup:before{content:""}.ion-toggle:before{content:""}.ion-toggle-filled:before{content:""}.ion-transgender:before{content:""}.ion-trash-a:before{content:""}.ion-trash-b:before{content:""}.ion-trophy:before{content:""}.ion-tshirt:before{content:""}.ion-tshirt-outline:before{content:""}.ion-umbrella:before{content:""}.ion-university:before{content:""}.ion-unlocked:before{content:""}.ion-upload:before{content:""}.ion-usb:before{content:""}.ion-videocamera:before{content:""}.ion-vo
lume-high:before{content:""}.ion-volume-low:before{content:""}.ion-volume-medium:before{content:""}.ion-volume-mute:before{content:""}.ion-wand:before{content:""}.ion-waterdrop:before{content:""}.ion-wifi:before{content:""}.ion-wineglass:before{content:""}.ion-woman:before{content:""}.ion-wrench:before{content:""}.ion-xbox:before{content:""}footer,header.title,header h1{text-align:center}footer{font-size:.8em;letter-spacing:-.02em;color:#777;margin:4em 2em 2em}h1,h2,h3,h4,h5,h6{margin-bottom:1em}time{border-bottom:1px dotted #1e282c}.ft-login{padding:1em;margin:2em 0;box-shadow:0 5px 10px #ddd;border:1px solid #dcdcdc}.ft-pagination{text-align:center;color:#2c3b41;font-size:2em;margin:1em 0}.ft-pagination-a{padding:0 .2em}.ft-dash-misc{color:#8aa4af}.ft-nav-container{padding:2rem;margin-top:3em;transition:padding-left .5s ease;transition:margin .5s ease}.ft-nav-container.ft-nav-enabled{padding-left:27rem}.ft-nav-toggle{display:block;z-index:5;left:0}.ft-nav,.ft-nav-toggle{position:fixed;top:0;transition:all .5s ease}.ft-nav{z-index:4;height:100%;width:25rem;left:-25rem;bottom:0;overflow-y:auto}.ft-nav.ft-nav-enabled{left:0}@media (min-width:768px){.ft-nav-toggle{left:-3rem;display:none}.ft-nav-container{padding-left:27rem;margin-top:0}.ft-nav{left:0}}.ft-nav-toggle{background:#1a2226}.ft-nav-toggle a{display:block;padding:.2em .8em;font-family:Lobster,cursive;font-size:2em}.ft-nav-toggle a,.ft-nav-toggle a:active,.ft-nav-toggle a:hover,.ft-nav-toggle a:visited{color:#fff;text-decoration:none}.ft-nav{background:#1a2226;color:#fff}.ft-nav span.icon{font-size:1.5em;margin-right:.4em}.ft-nav ul{list-style:none}.ft-nav ul,.ft-nav ul li{margin:0;padding:0}.ft-nav ul li a.ft-nav-entry{border-left:.7rem solid transparent;color:#8aa4af;background:#222d32;padding:1rem;display:inline-block;letter-spacing:.05em;text-transform:uppercase;width:100%;margin-right:-6rem}.ft-nav ul li a.ft-nav-entry:hover{border-left:.7rem solid 
#3c8dbc;color:#fff;background:#1e282c;text-decoration:none}.ft-nav ul li a.ft-nav-quicklink{margin:0;padding:1rem;width:4rem;display:inline-block;color:#8aa4af;background:transparent;text-align:center}.ft-nav ul li a.ft-nav-quicklink:hover{color:#fff;background:#3c8dbc;text-decoration:none}.ft-nav li>ul li a.ft-nav-entry{color:#b8c7ce;background:#2c3b41;padding-left:3em;text-transform:none}.ft-nav li>ul li a.ft-nav-entry:hover{color:#fff;background:#2c3b41}.ft-nav ul li a.ft-nav-active{border-left:5px solid #d33939;color:#fff;background:#1e282c}.ft-nav li>ul li a.ft-nav-active{color:#fff;background:#2c3b41}.ft-nav-title{font-size:2rem;font-weight:700;text-align:center;padding:2rem 0;margin:0}.ft-nav-brand{font-family:Lobster,cursive;font-size:2em;text-shadow:2px 5px 0 #2c3b41}.ft-nav-auth{color:#2c3b41;font-size:.8em;text-align:center;margin:2em 0}.ft-summary-source{font-size:2em}.ft-write-form textarea{resize:vertical;outline:none;overflow:auto;font-family:Courier,Courier New,sans-serif;padding:1em}#ft-publog{position:fixed;right:0;bottom:0;width:42%;min-width:20em;margin:.5em;color:#fff;background:#3c8dbc;border-radius:.5em;box-shadow:0 0 10px #000}#ft-publog button{padding:.2em .4em}#ft-publog-container{margin:1em}#ft-publog-container div{margin:.1em} \ No newline at end of file diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/static/img/apple-touch-icon.png Binary file piecrust/admin/static/img/apple-touch-icon.png has changed diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/static/js/foodtruck.min.js --- a/piecrust/admin/static/js/foodtruck.min.js Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/static/js/foodtruck.min.js Tue Nov 21 22:07:12 2017 -0800 @@ -1,12010 +1,11974 @@ -/*! 
- * jQuery JavaScript Library v3.2.1 - * https://jquery.com/ - * - * Includes Sizzle.js - * https://sizzlejs.com/ - * - * Copyright JS Foundation and other contributors - * Released under the MIT license - * https://jquery.org/license - * - * Date: 2017-03-20T18:59Z - */ -( function( global, factory ) { - - "use strict"; - - if ( typeof module === "object" && typeof module.exports === "object" ) { - - // For CommonJS and CommonJS-like environments where a proper `window` - // is present, execute the factory and get jQuery. - // For environments that do not have a `window` with a `document` - // (such as Node.js), expose a factory as module.exports. - // This accentuates the need for the creation of a real `window`. - // e.g. var jQuery = require("jquery")(window); - // See ticket #14549 for more info. - module.exports = global.document ? - factory( global, true ) : - function( w ) { - if ( !w.document ) { - throw new Error( "jQuery requires a window with a document" ); - } - return factory( w ); - }; - } else { - factory( global ); - } - -// Pass this if window is not defined yet -} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { - -// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 -// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode -// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common -// enough that all such attempts are guarded in a try block. 
-"use strict"; - -var arr = []; - -var document = window.document; - -var getProto = Object.getPrototypeOf; - -var slice = arr.slice; - -var concat = arr.concat; - -var push = arr.push; - -var indexOf = arr.indexOf; - -var class2type = {}; - -var toString = class2type.toString; - -var hasOwn = class2type.hasOwnProperty; - -var fnToString = hasOwn.toString; - -var ObjectFunctionString = fnToString.call( Object ); - -var support = {}; - - - - function DOMEval( code, doc ) { - doc = doc || document; - - var script = doc.createElement( "script" ); - - script.text = code; - doc.head.appendChild( script ).parentNode.removeChild( script ); - } -/* global Symbol */ -// Defining this global in .eslintrc.json would create a danger of using the global -// unguarded in another place, it seems safer to define global only for this module - - - -var - version = "3.2.1", - - // Define a local copy of jQuery - jQuery = function( selector, context ) { - - // The jQuery object is actually just the init constructor 'enhanced' - // Need init if jQuery is called (just allow error to be thrown if not included) - return new jQuery.fn.init( selector, context ); - }, - - // Support: Android <=4.0 only - // Make sure we trim BOM and NBSP - rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, - - // Matches dashed string for camelizing - rmsPrefix = /^-ms-/, - rdashAlpha = /-([a-z])/g, - - // Used by jQuery.camelCase as callback to replace() - fcamelCase = function( all, letter ) { - return letter.toUpperCase(); - }; - -jQuery.fn = jQuery.prototype = { - - // The current version of jQuery being used - jquery: version, - - constructor: jQuery, - - // The default length of a jQuery object is 0 - length: 0, - - toArray: function() { - return slice.call( this ); - }, - - // Get the Nth element in the matched element set OR - // Get the whole matched element set as a clean array - get: function( num ) { - - // Return all the elements in a clean array - if ( num == null ) { - return slice.call( this ); - 
} - - // Return just the one element from the set - return num < 0 ? this[ num + this.length ] : this[ num ]; - }, - - // Take an array of elements and push it onto the stack - // (returning the new matched element set) - pushStack: function( elems ) { - - // Build a new jQuery matched element set - var ret = jQuery.merge( this.constructor(), elems ); - - // Add the old object onto the stack (as a reference) - ret.prevObject = this; - - // Return the newly-formed element set - return ret; - }, - - // Execute a callback for every element in the matched set. - each: function( callback ) { - return jQuery.each( this, callback ); - }, - - map: function( callback ) { - return this.pushStack( jQuery.map( this, function( elem, i ) { - return callback.call( elem, i, elem ); - } ) ); - }, - - slice: function() { - return this.pushStack( slice.apply( this, arguments ) ); - }, - - first: function() { - return this.eq( 0 ); - }, - - last: function() { - return this.eq( -1 ); - }, - - eq: function( i ) { - var len = this.length, - j = +i + ( i < 0 ? len : 0 ); - return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); - }, - - end: function() { - return this.prevObject || this.constructor(); - }, - - // For internal use only. - // Behaves like an Array's method, not like a jQuery method. 
- push: push, - sort: arr.sort, - splice: arr.splice -}; - -jQuery.extend = jQuery.fn.extend = function() { - var options, name, src, copy, copyIsArray, clone, - target = arguments[ 0 ] || {}, - i = 1, - length = arguments.length, - deep = false; - - // Handle a deep copy situation - if ( typeof target === "boolean" ) { - deep = target; - - // Skip the boolean and the target - target = arguments[ i ] || {}; - i++; - } - - // Handle case when target is a string or something (possible in deep copy) - if ( typeof target !== "object" && !jQuery.isFunction( target ) ) { - target = {}; - } - - // Extend jQuery itself if only one argument is passed - if ( i === length ) { - target = this; - i--; - } - - for ( ; i < length; i++ ) { - - // Only deal with non-null/undefined values - if ( ( options = arguments[ i ] ) != null ) { - - // Extend the base object - for ( name in options ) { - src = target[ name ]; - copy = options[ name ]; - - // Prevent never-ending loop - if ( target === copy ) { - continue; - } - - // Recurse if we're merging plain objects or arrays - if ( deep && copy && ( jQuery.isPlainObject( copy ) || - ( copyIsArray = Array.isArray( copy ) ) ) ) { - - if ( copyIsArray ) { - copyIsArray = false; - clone = src && Array.isArray( src ) ? src : []; - - } else { - clone = src && jQuery.isPlainObject( src ) ? 
src : {}; - } - - // Never move original objects, clone them - target[ name ] = jQuery.extend( deep, clone, copy ); - - // Don't bring in undefined values - } else if ( copy !== undefined ) { - target[ name ] = copy; - } - } - } - } - - // Return the modified object - return target; -}; - -jQuery.extend( { - - // Unique for each copy of jQuery on the page - expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), - - // Assume jQuery is ready without the ready module - isReady: true, - - error: function( msg ) { - throw new Error( msg ); - }, - - noop: function() {}, - - isFunction: function( obj ) { - return jQuery.type( obj ) === "function"; - }, - - isWindow: function( obj ) { - return obj != null && obj === obj.window; - }, - - isNumeric: function( obj ) { - - // As of jQuery 3.0, isNumeric is limited to - // strings and numbers (primitives or objects) - // that can be coerced to finite numbers (gh-2662) - var type = jQuery.type( obj ); - return ( type === "number" || type === "string" ) && - - // parseFloat NaNs numeric-cast false positives ("") - // ...but misinterprets leading-number strings, particularly hex literals ("0x...") - // subtraction forces infinities to NaN - !isNaN( obj - parseFloat( obj ) ); - }, - - isPlainObject: function( obj ) { - var proto, Ctor; - - // Detect obvious negatives - // Use toString instead of jQuery.type to catch host objects - if ( !obj || toString.call( obj ) !== "[object Object]" ) { - return false; - } - - proto = getProto( obj ); - - // Objects with no prototype (e.g., `Object.create( null )`) are plain - if ( !proto ) { - return true; - } - - // Objects with prototype are plain iff they were constructed by a global Object function - Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; - return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; - }, - - isEmptyObject: function( obj ) { - - /* eslint-disable no-unused-vars */ - // See 
https://github.com/eslint/eslint/issues/6125 - var name; - - for ( name in obj ) { - return false; - } - return true; - }, - - type: function( obj ) { - if ( obj == null ) { - return obj + ""; - } - - // Support: Android <=2.3 only (functionish RegExp) - return typeof obj === "object" || typeof obj === "function" ? - class2type[ toString.call( obj ) ] || "object" : - typeof obj; - }, - - // Evaluates a script in a global context - globalEval: function( code ) { - DOMEval( code ); - }, - - // Convert dashed to camelCase; used by the css and data modules - // Support: IE <=9 - 11, Edge 12 - 13 - // Microsoft forgot to hump their vendor prefix (#9572) - camelCase: function( string ) { - return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); - }, - - each: function( obj, callback ) { - var length, i = 0; - - if ( isArrayLike( obj ) ) { - length = obj.length; - for ( ; i < length; i++ ) { - if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { - break; - } - } - } else { - for ( i in obj ) { - if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { - break; - } - } - } - - return obj; - }, - - // Support: Android <=4.0 only - trim: function( text ) { - return text == null ? - "" : - ( text + "" ).replace( rtrim, "" ); - }, - - // results is for internal usage only - makeArray: function( arr, results ) { - var ret = results || []; - - if ( arr != null ) { - if ( isArrayLike( Object( arr ) ) ) { - jQuery.merge( ret, - typeof arr === "string" ? - [ arr ] : arr - ); - } else { - push.call( ret, arr ); - } - } - - return ret; - }, - - inArray: function( elem, arr, i ) { - return arr == null ? 
-1 : indexOf.call( arr, elem, i ); - }, - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - merge: function( first, second ) { - var len = +second.length, - j = 0, - i = first.length; - - for ( ; j < len; j++ ) { - first[ i++ ] = second[ j ]; - } - - first.length = i; - - return first; - }, - - grep: function( elems, callback, invert ) { - var callbackInverse, - matches = [], - i = 0, - length = elems.length, - callbackExpect = !invert; - - // Go through the array, only saving the items - // that pass the validator function - for ( ; i < length; i++ ) { - callbackInverse = !callback( elems[ i ], i ); - if ( callbackInverse !== callbackExpect ) { - matches.push( elems[ i ] ); - } - } - - return matches; - }, - - // arg is for internal usage only - map: function( elems, callback, arg ) { - var length, value, - i = 0, - ret = []; - - // Go through the array, translating each of the items to their new values - if ( isArrayLike( elems ) ) { - length = elems.length; - for ( ; i < length; i++ ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret.push( value ); - } - } - - // Go through every key on the object, - } else { - for ( i in elems ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret.push( value ); - } - } - } - - // Flatten any nested arrays - return concat.apply( [], ret ); - }, - - // A global GUID counter for objects - guid: 1, - - // Bind a function to a context, optionally partially applying any - // arguments. - proxy: function( fn, context ) { - var tmp, args, proxy; - - if ( typeof context === "string" ) { - tmp = fn[ context ]; - context = fn; - fn = tmp; - } - - // Quick check to determine if target is callable, in the spec - // this throws a TypeError, but we will just return undefined. 
- if ( !jQuery.isFunction( fn ) ) { - return undefined; - } - - // Simulated bind - args = slice.call( arguments, 2 ); - proxy = function() { - return fn.apply( context || this, args.concat( slice.call( arguments ) ) ); - }; - - // Set the guid of unique handler to the same of original handler, so it can be removed - proxy.guid = fn.guid = fn.guid || jQuery.guid++; - - return proxy; - }, - - now: Date.now, - - // jQuery.support is not used in Core but other projects attach their - // properties to it so it needs to exist. - support: support -} ); - -if ( typeof Symbol === "function" ) { - jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; -} - -// Populate the class2type map -jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), -function( i, name ) { - class2type[ "[object " + name + "]" ] = name.toLowerCase(); -} ); - -function isArrayLike( obj ) { - - // Support: real iOS 8.2 only (not reproducible in simulator) - // `in` check used to prevent JIT error (gh-2145) - // hasOwn isn't used here due to false negatives - // regarding Nodelist length in IE - var length = !!obj && "length" in obj && obj.length, - type = jQuery.type( obj ); - - if ( type === "function" || jQuery.isWindow( obj ) ) { - return false; - } - - return type === "array" || length === 0 || - typeof length === "number" && length > 0 && ( length - 1 ) in obj; -} -var Sizzle = -/*! 
- * Sizzle CSS Selector Engine v2.3.3 - * https://sizzlejs.com/ - * - * Copyright jQuery Foundation and other contributors - * Released under the MIT license - * http://jquery.org/license - * - * Date: 2016-08-08 - */ -(function( window ) { - -var i, - support, - Expr, - getText, - isXML, - tokenize, - compile, - select, - outermostContext, - sortInput, - hasDuplicate, - - // Local document vars - setDocument, - document, - docElem, - documentIsHTML, - rbuggyQSA, - rbuggyMatches, - matches, - contains, - - // Instance-specific data - expando = "sizzle" + 1 * new Date(), - preferredDoc = window.document, - dirruns = 0, - done = 0, - classCache = createCache(), - tokenCache = createCache(), - compilerCache = createCache(), - sortOrder = function( a, b ) { - if ( a === b ) { - hasDuplicate = true; - } - return 0; - }, - - // Instance methods - hasOwn = ({}).hasOwnProperty, - arr = [], - pop = arr.pop, - push_native = arr.push, - push = arr.push, - slice = arr.slice, - // Use a stripped-down indexOf as it's faster than native - // https://jsperf.com/thor-indexof-vs-for/5 - indexOf = function( list, elem ) { - var i = 0, - len = list.length; - for ( ; i < len; i++ ) { - if ( list[i] === elem ) { - return i; - } - } - return -1; - }, - - booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped", - - // Regular expressions - - // http://www.w3.org/TR/css3-selectors/#whitespace - whitespace = "[\\x20\\t\\r\\n\\f]", - - // http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier - identifier = "(?:\\\\.|[\\w-]|[^\0-\\xa0])+", - - // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors - attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace + - // Operator (capture 2) - "*([*^$|!~]?=)" + whitespace + - // "Attribute values must be CSS identifiers [capture 5] or strings [capture 3 or capture 4]" - "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" 
+ identifier + "))|)" + whitespace + - "*\\]", - - pseudos = ":(" + identifier + ")(?:\\((" + - // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments: - // 1. quoted (capture 3; capture 4 or capture 5) - "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" + - // 2. simple (capture 6) - "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" + - // 3. anything else (capture 2) - ".*" + - ")\\)|)", - - // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter - rwhitespace = new RegExp( whitespace + "+", "g" ), - rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ), - - rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), - rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ), - - rattributeQuotes = new RegExp( "=" + whitespace + "*([^\\]'\"]*?)" + whitespace + "*\\]", "g" ), - - rpseudo = new RegExp( pseudos ), - ridentifier = new RegExp( "^" + identifier + "$" ), - - matchExpr = { - "ID": new RegExp( "^#(" + identifier + ")" ), - "CLASS": new RegExp( "^\\.(" + identifier + ")" ), - "TAG": new RegExp( "^(" + identifier + "|[*])" ), - "ATTR": new RegExp( "^" + attributes ), - "PSEUDO": new RegExp( "^" + pseudos ), - "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace + - "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace + - "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), - "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), - // For use in libraries implementing .is() - // We use this for POS matching in `select` - "needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + - whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) - }, - - rinputs = /^(?:input|select|textarea|button)$/i, - rheader = /^h\d$/i, - - rnative = /^[^{]+\{\s*\[native \w/, - - // 
Easily-parseable/retrievable ID or TAG or CLASS selectors - rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, - - rsibling = /[+~]/, - - // CSS escapes - // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters - runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ), - funescape = function( _, escaped, escapedWhitespace ) { - var high = "0x" + escaped - 0x10000; - // NaN means non-codepoint - // Support: Firefox<24 - // Workaround erroneous numeric interpretation of +"0x" - return high !== high || escapedWhitespace ? - escaped : - high < 0 ? - // BMP codepoint - String.fromCharCode( high + 0x10000 ) : - // Supplemental Plane codepoint (surrogate pair) - String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); - }, - - // CSS string/identifier serialization - // https://drafts.csswg.org/cssom/#common-serializing-idioms - rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g, - fcssescape = function( ch, asCodePoint ) { - if ( asCodePoint ) { - - // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER - if ( ch === "\0" ) { - return "\uFFFD"; - } - - // Control characters and (dependent upon position) numbers get escaped as code points - return ch.slice( 0, -1 ) + "\\" + ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " "; - } - - // Other potentially-special ASCII characters get backslash-escaped - return "\\" + ch; - }, - - // Used for iframes - // See setDocument() - // Removing the function wrapper causes a "Permission Denied" - // error in IE - unloadHandler = function() { - setDocument(); - }, - - disabledAncestor = addCombinator( - function( elem ) { - return elem.disabled === true && ("form" in elem || "label" in elem); - }, - { dir: "parentNode", next: "legend" } - ); - -// Optimize for push.apply( _, NodeList ) -try { - push.apply( - (arr = slice.call( preferredDoc.childNodes )), - preferredDoc.childNodes - ); - // Support: Android<4.0 - // Detect silently failing push.apply - arr[ 
preferredDoc.childNodes.length ].nodeType; -} catch ( e ) { - push = { apply: arr.length ? - - // Leverage slice if possible - function( target, els ) { - push_native.apply( target, slice.call(els) ); - } : - - // Support: IE<9 - // Otherwise append directly - function( target, els ) { - var j = target.length, - i = 0; - // Can't trust NodeList.length - while ( (target[j++] = els[i++]) ) {} - target.length = j - 1; - } - }; -} - -function Sizzle( selector, context, results, seed ) { - var m, i, elem, nid, match, groups, newSelector, - newContext = context && context.ownerDocument, - - // nodeType defaults to 9, since context defaults to document - nodeType = context ? context.nodeType : 9; - - results = results || []; - - // Return early from calls with invalid selector or context - if ( typeof selector !== "string" || !selector || - nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) { - - return results; - } - - // Try to shortcut find operations (as opposed to filters) in HTML documents - if ( !seed ) { - - if ( ( context ? 
context.ownerDocument || context : preferredDoc ) !== document ) { - setDocument( context ); - } - context = context || document; - - if ( documentIsHTML ) { - - // If the selector is sufficiently simple, try using a "get*By*" DOM method - // (excepting DocumentFragment context, where the methods don't exist) - if ( nodeType !== 11 && (match = rquickExpr.exec( selector )) ) { - - // ID selector - if ( (m = match[1]) ) { - - // Document context - if ( nodeType === 9 ) { - if ( (elem = context.getElementById( m )) ) { - - // Support: IE, Opera, Webkit - // TODO: identify versions - // getElementById can match elements by name instead of ID - if ( elem.id === m ) { - results.push( elem ); - return results; - } - } else { - return results; - } - - // Element context - } else { - - // Support: IE, Opera, Webkit - // TODO: identify versions - // getElementById can match elements by name instead of ID - if ( newContext && (elem = newContext.getElementById( m )) && - contains( context, elem ) && - elem.id === m ) { - - results.push( elem ); - return results; - } - } - - // Type selector - } else if ( match[2] ) { - push.apply( results, context.getElementsByTagName( selector ) ); - return results; - - // Class selector - } else if ( (m = match[3]) && support.getElementsByClassName && - context.getElementsByClassName ) { - - push.apply( results, context.getElementsByClassName( m ) ); - return results; - } - } - - // Take advantage of querySelectorAll - if ( support.qsa && - !compilerCache[ selector + " " ] && - (!rbuggyQSA || !rbuggyQSA.test( selector )) ) { - - if ( nodeType !== 1 ) { - newContext = context; - newSelector = selector; - - // qSA looks outside Element context, which is not what we want - // Thanks to Andrew Dupont for this workaround technique - // Support: IE <=8 - // Exclude object elements - } else if ( context.nodeName.toLowerCase() !== "object" ) { - - // Capture the context ID, setting it first if necessary - if ( (nid = context.getAttribute( "id" )) ) 
{ - nid = nid.replace( rcssescape, fcssescape ); - } else { - context.setAttribute( "id", (nid = expando) ); - } - - // Prefix every selector in the list - groups = tokenize( selector ); - i = groups.length; - while ( i-- ) { - groups[i] = "#" + nid + " " + toSelector( groups[i] ); - } - newSelector = groups.join( "," ); - - // Expand context for sibling selectors - newContext = rsibling.test( selector ) && testContext( context.parentNode ) || - context; - } - - if ( newSelector ) { - try { - push.apply( results, - newContext.querySelectorAll( newSelector ) - ); - return results; - } catch ( qsaError ) { - } finally { - if ( nid === expando ) { - context.removeAttribute( "id" ); - } - } - } - } - } - } - - // All others - return select( selector.replace( rtrim, "$1" ), context, results, seed ); -} - -/** - * Create key-value caches of limited size - * @returns {function(string, object)} Returns the Object data after storing it on itself with - * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) - * deleting the oldest entry - */ -function createCache() { - var keys = []; - - function cache( key, value ) { - // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) - if ( keys.push( key + " " ) > Expr.cacheLength ) { - // Only keep the most recent entries - delete cache[ keys.shift() ]; - } - return (cache[ key + " " ] = value); - } - return cache; -} - -/** - * Mark a function for special use by Sizzle - * @param {Function} fn The function to mark - */ -function markFunction( fn ) { - fn[ expando ] = true; - return fn; -} - -/** - * Support testing using an element - * @param {Function} fn Passed the created element and returns a boolean result - */ -function assert( fn ) { - var el = document.createElement("fieldset"); - - try { - return !!fn( el ); - } catch (e) { - return false; - } finally { - // Remove from its parent by default - if ( el.parentNode ) { - el.parentNode.removeChild( el ); 
- } - // release memory in IE - el = null; - } -} - -/** - * Adds the same handler for all of the specified attrs - * @param {String} attrs Pipe-separated list of attributes - * @param {Function} handler The method that will be applied - */ -function addHandle( attrs, handler ) { - var arr = attrs.split("|"), - i = arr.length; - - while ( i-- ) { - Expr.attrHandle[ arr[i] ] = handler; - } -} - -/** - * Checks document order of two siblings - * @param {Element} a - * @param {Element} b - * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b - */ -function siblingCheck( a, b ) { - var cur = b && a, - diff = cur && a.nodeType === 1 && b.nodeType === 1 && - a.sourceIndex - b.sourceIndex; - - // Use IE sourceIndex if available on both nodes - if ( diff ) { - return diff; - } - - // Check if b follows a - if ( cur ) { - while ( (cur = cur.nextSibling) ) { - if ( cur === b ) { - return -1; - } - } - } - - return a ? 1 : -1; -} - -/** - * Returns a function to use in pseudos for input types - * @param {String} type - */ -function createInputPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for buttons - * @param {String} type - */ -function createButtonPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return (name === "input" || name === "button") && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for :enabled/:disabled - * @param {Boolean} disabled true for :disabled; false for :enabled - */ -function createDisabledPseudo( disabled ) { - - // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable - return function( elem ) { - - // Only certain elements can match :enabled or :disabled - // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled - // 
https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled - if ( "form" in elem ) { - - // Check for inherited disabledness on relevant non-disabled elements: - // * listed form-associated elements in a disabled fieldset - // https://html.spec.whatwg.org/multipage/forms.html#category-listed - // https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled - // * option elements in a disabled optgroup - // https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled - // All such elements have a "form" property. - if ( elem.parentNode && elem.disabled === false ) { - - // Option elements defer to a parent optgroup if present - if ( "label" in elem ) { - if ( "label" in elem.parentNode ) { - return elem.parentNode.disabled === disabled; - } else { - return elem.disabled === disabled; - } - } - - // Support: IE 6 - 11 - // Use the isDisabled shortcut property to check for disabled fieldset ancestors - return elem.isDisabled === disabled || - - // Where there is no isDisabled, check manually - /* jshint -W018 */ - elem.isDisabled !== !disabled && - disabledAncestor( elem ) === disabled; - } - - return elem.disabled === disabled; - - // Try to winnow out elements that can't be disabled before trusting the disabled property. - // Some victims get caught in our net (label, legend, menu, track), but it shouldn't - // even exist on them, let alone have a boolean value. 
- } else if ( "label" in elem ) { - return elem.disabled === disabled; - } - - // Remaining elements are neither :enabled nor :disabled - return false; - }; -} - -/** - * Returns a function to use in pseudos for positionals - * @param {Function} fn - */ -function createPositionalPseudo( fn ) { - return markFunction(function( argument ) { - argument = +argument; - return markFunction(function( seed, matches ) { - var j, - matchIndexes = fn( [], seed.length, argument ), - i = matchIndexes.length; - - // Match elements found at the specified indexes - while ( i-- ) { - if ( seed[ (j = matchIndexes[i]) ] ) { - seed[j] = !(matches[j] = seed[j]); - } - } - }); - }); -} - -/** - * Checks a node for validity as a Sizzle context - * @param {Element|Object=} context - * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value - */ -function testContext( context ) { - return context && typeof context.getElementsByTagName !== "undefined" && context; -} - -// Expose support vars for convenience -support = Sizzle.support = {}; - -/** - * Detects XML nodes - * @param {Element|Object} elem An element or a document - * @returns {Boolean} True iff elem is a non-HTML XML node - */ -isXML = Sizzle.isXML = function( elem ) { - // documentElement is verified for cases where it doesn't yet exist - // (such as loading iframes in IE - #4833) - var documentElement = elem && (elem.ownerDocument || elem).documentElement; - return documentElement ? documentElement.nodeName !== "HTML" : false; -}; - -/** - * Sets document-related variables once based on the current document - * @param {Element|Object} [doc] An element or document object to use to set the document - * @returns {Object} Returns the current document - */ -setDocument = Sizzle.setDocument = function( node ) { - var hasCompare, subWindow, - doc = node ? 
node.ownerDocument || node : preferredDoc; - - // Return early if doc is invalid or already selected - if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) { - return document; - } - - // Update global variables - document = doc; - docElem = document.documentElement; - documentIsHTML = !isXML( document ); - - // Support: IE 9-11, Edge - // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936) - if ( preferredDoc !== document && - (subWindow = document.defaultView) && subWindow.top !== subWindow ) { - - // Support: IE 11, Edge - if ( subWindow.addEventListener ) { - subWindow.addEventListener( "unload", unloadHandler, false ); - - // Support: IE 9 - 10 only - } else if ( subWindow.attachEvent ) { - subWindow.attachEvent( "onunload", unloadHandler ); - } - } - - /* Attributes - ---------------------------------------------------------------------- */ - - // Support: IE<8 - // Verify that getAttribute really returns attributes and not properties - // (excepting IE8 booleans) - support.attributes = assert(function( el ) { - el.className = "i"; - return !el.getAttribute("className"); - }); - - /* getElement(s)By* - ---------------------------------------------------------------------- */ - - // Check if getElementsByTagName("*") returns only elements - support.getElementsByTagName = assert(function( el ) { - el.appendChild( document.createComment("") ); - return !el.getElementsByTagName("*").length; - }); - - // Support: IE<9 - support.getElementsByClassName = rnative.test( document.getElementsByClassName ); - - // Support: IE<10 - // Check if getElementById returns elements by name - // The broken getElementById methods don't pick up programmatically-set names, - // so use a roundabout getElementsByName test - support.getById = assert(function( el ) { - docElem.appendChild( el ).id = expando; - return !document.getElementsByName || !document.getElementsByName( expando ).length; - }); - - // ID filter and find - if ( 
support.getById ) { - Expr.filter["ID"] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - return elem.getAttribute("id") === attrId; - }; - }; - Expr.find["ID"] = function( id, context ) { - if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { - var elem = context.getElementById( id ); - return elem ? [ elem ] : []; - } - }; - } else { - Expr.filter["ID"] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - var node = typeof elem.getAttributeNode !== "undefined" && - elem.getAttributeNode("id"); - return node && node.value === attrId; - }; - }; - - // Support: IE 6 - 7 only - // getElementById is not reliable as a find shortcut - Expr.find["ID"] = function( id, context ) { - if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { - var node, i, elems, - elem = context.getElementById( id ); - - if ( elem ) { - - // Verify the id attribute - node = elem.getAttributeNode("id"); - if ( node && node.value === id ) { - return [ elem ]; - } - - // Fall back on getElementsByName - elems = context.getElementsByName( id ); - i = 0; - while ( (elem = elems[i++]) ) { - node = elem.getAttributeNode("id"); - if ( node && node.value === id ) { - return [ elem ]; - } - } - } - - return []; - } - }; - } - - // Tag - Expr.find["TAG"] = support.getElementsByTagName ? 
- function( tag, context ) { - if ( typeof context.getElementsByTagName !== "undefined" ) { - return context.getElementsByTagName( tag ); - - // DocumentFragment nodes don't have gEBTN - } else if ( support.qsa ) { - return context.querySelectorAll( tag ); - } - } : - - function( tag, context ) { - var elem, - tmp = [], - i = 0, - // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too - results = context.getElementsByTagName( tag ); - - // Filter out possible comments - if ( tag === "*" ) { - while ( (elem = results[i++]) ) { - if ( elem.nodeType === 1 ) { - tmp.push( elem ); - } - } - - return tmp; - } - return results; - }; - - // Class - Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) { - if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) { - return context.getElementsByClassName( className ); - } - }; - - /* QSA/matchesSelector - ---------------------------------------------------------------------- */ - - // QSA and matchesSelector support - - // matchesSelector(:active) reports false when true (IE9/Opera 11.5) - rbuggyMatches = []; - - // qSa(:focus) reports false when true (Chrome 21) - // We allow this because of a bug in IE8/9 that throws an error - // whenever `document.activeElement` is accessed on an iframe - // So, we allow :focus to pass through QSA all the time to avoid the IE error - // See https://bugs.jquery.com/ticket/13378 - rbuggyQSA = []; - - if ( (support.qsa = rnative.test( document.querySelectorAll )) ) { - // Build QSA regex - // Regex strategy adopted from Diego Perini - assert(function( el ) { - // Select is set to empty string on purpose - // This is to test IE's treatment of not explicitly - // setting a boolean content attribute, - // since its presence should be enough - // https://bugs.jquery.com/ticket/12359 - docElem.appendChild( el ).innerHTML = "" + - ""; - - // Support: IE8, Opera 11-12.16 - // Nothing should be selected when empty 
strings follow ^= or $= or *= - // The test attribute must be unknown in Opera but "safe" for WinRT - // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section - if ( el.querySelectorAll("[msallowcapture^='']").length ) { - rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" ); - } - - // Support: IE8 - // Boolean attributes and "value" are not treated correctly - if ( !el.querySelectorAll("[selected]").length ) { - rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" ); - } - - // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+ - if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) { - rbuggyQSA.push("~="); - } - - // Webkit/Opera - :checked should return selected option elements - // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - // IE8 throws error here and will not see later tests - if ( !el.querySelectorAll(":checked").length ) { - rbuggyQSA.push(":checked"); - } - - // Support: Safari 8+, iOS 8+ - // https://bugs.webkit.org/show_bug.cgi?id=136851 - // In-page `selector#id sibling-combinator selector` fails - if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) { - rbuggyQSA.push(".#.+[+~]"); - } - }); - - assert(function( el ) { - el.innerHTML = "" + - ""; - - // Support: Windows 8 Native Apps - // The type and name attributes are restricted during .innerHTML assignment - var input = document.createElement("input"); - input.setAttribute( "type", "hidden" ); - el.appendChild( input ).setAttribute( "name", "D" ); - - // Support: IE8 - // Enforce case-sensitivity of name attribute - if ( el.querySelectorAll("[name=d]").length ) { - rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" ); - } - - // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) - // IE8 throws error here and will not see later tests - if ( el.querySelectorAll(":enabled").length !== 2 ) { - rbuggyQSA.push( ":enabled", ":disabled" ); - } - - // Support: IE9-11+ - // 
IE's :disabled selector does not pick up the children of disabled fieldsets - docElem.appendChild( el ).disabled = true; - if ( el.querySelectorAll(":disabled").length !== 2 ) { - rbuggyQSA.push( ":enabled", ":disabled" ); - } - - // Opera 10-11 does not throw on post-comma invalid pseudos - el.querySelectorAll("*,:x"); - rbuggyQSA.push(",.*:"); - }); - } - - if ( (support.matchesSelector = rnative.test( (matches = docElem.matches || - docElem.webkitMatchesSelector || - docElem.mozMatchesSelector || - docElem.oMatchesSelector || - docElem.msMatchesSelector) )) ) { - - assert(function( el ) { - // Check to see if it's possible to do matchesSelector - // on a disconnected node (IE 9) - support.disconnectedMatch = matches.call( el, "*" ); - - // This should fail with an exception - // Gecko does not error, returns false instead - matches.call( el, "[s!='']:x" ); - rbuggyMatches.push( "!=", pseudos ); - }); - } - - rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") ); - rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") ); - - /* Contains - ---------------------------------------------------------------------- */ - hasCompare = rnative.test( docElem.compareDocumentPosition ); - - // Element contains another - // Purposefully self-exclusive - // As in, an element does not contain itself - contains = hasCompare || rnative.test( docElem.contains ) ? - function( a, b ) { - var adown = a.nodeType === 9 ? a.documentElement : a, - bup = b && b.parentNode; - return a === bup || !!( bup && bup.nodeType === 1 && ( - adown.contains ? - adown.contains( bup ) : - a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 - )); - } : - function( a, b ) { - if ( b ) { - while ( (b = b.parentNode) ) { - if ( b === a ) { - return true; - } - } - } - return false; - }; - - /* Sorting - ---------------------------------------------------------------------- */ - - // Document order sorting - sortOrder = hasCompare ? 
- function( a, b ) { - - // Flag for duplicate removal - if ( a === b ) { - hasDuplicate = true; - return 0; - } - - // Sort on method existence if only one input has compareDocumentPosition - var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; - if ( compare ) { - return compare; - } - - // Calculate position if both inputs belong to the same document - compare = ( a.ownerDocument || a ) === ( b.ownerDocument || b ) ? - a.compareDocumentPosition( b ) : - - // Otherwise we know they are disconnected - 1; - - // Disconnected nodes - if ( compare & 1 || - (!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) { - - // Choose the first element that is related to our preferred document - if ( a === document || a.ownerDocument === preferredDoc && contains(preferredDoc, a) ) { - return -1; - } - if ( b === document || b.ownerDocument === preferredDoc && contains(preferredDoc, b) ) { - return 1; - } - - // Maintain original order - return sortInput ? - ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : - 0; - } - - return compare & 4 ? -1 : 1; - } : - function( a, b ) { - // Exit early if the nodes are identical - if ( a === b ) { - hasDuplicate = true; - return 0; - } - - var cur, - i = 0, - aup = a.parentNode, - bup = b.parentNode, - ap = [ a ], - bp = [ b ]; - - // Parentless nodes are either documents or disconnected - if ( !aup || !bup ) { - return a === document ? -1 : - b === document ? 1 : - aup ? -1 : - bup ? 1 : - sortInput ? 
- ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : - 0; - - // If the nodes are siblings, we can do a quick check - } else if ( aup === bup ) { - return siblingCheck( a, b ); - } - - // Otherwise we need full lists of their ancestors for comparison - cur = a; - while ( (cur = cur.parentNode) ) { - ap.unshift( cur ); - } - cur = b; - while ( (cur = cur.parentNode) ) { - bp.unshift( cur ); - } - - // Walk down the tree looking for a discrepancy - while ( ap[i] === bp[i] ) { - i++; - } - - return i ? - // Do a sibling check if the nodes have a common ancestor - siblingCheck( ap[i], bp[i] ) : - - // Otherwise nodes in our document sort first - ap[i] === preferredDoc ? -1 : - bp[i] === preferredDoc ? 1 : - 0; - }; - - return document; -}; - -Sizzle.matches = function( expr, elements ) { - return Sizzle( expr, null, null, elements ); -}; - -Sizzle.matchesSelector = function( elem, expr ) { - // Set document vars if needed - if ( ( elem.ownerDocument || elem ) !== document ) { - setDocument( elem ); - } - - // Make sure that attribute selectors are quoted - expr = expr.replace( rattributeQuotes, "='$1']" ); - - if ( support.matchesSelector && documentIsHTML && - !compilerCache[ expr + " " ] && - ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && - ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { - - try { - var ret = matches.call( elem, expr ); - - // IE 9's matchesSelector returns false on disconnected nodes - if ( ret || support.disconnectedMatch || - // As well, disconnected nodes are said to be in a document - // fragment in IE 9 - elem.document && elem.document.nodeType !== 11 ) { - return ret; - } - } catch (e) {} - } - - return Sizzle( expr, document, null, [ elem ] ).length > 0; -}; - -Sizzle.contains = function( context, elem ) { - // Set document vars if needed - if ( ( context.ownerDocument || context ) !== document ) { - setDocument( context ); - } - return contains( context, elem ); -}; - -Sizzle.attr = function( elem, name ) { - // Set document vars if 
needed - if ( ( elem.ownerDocument || elem ) !== document ) { - setDocument( elem ); - } - - var fn = Expr.attrHandle[ name.toLowerCase() ], - // Don't get fooled by Object.prototype properties (jQuery #13807) - val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? - fn( elem, name, !documentIsHTML ) : - undefined; - - return val !== undefined ? - val : - support.attributes || !documentIsHTML ? - elem.getAttribute( name ) : - (val = elem.getAttributeNode(name)) && val.specified ? - val.value : - null; -}; - -Sizzle.escape = function( sel ) { - return (sel + "").replace( rcssescape, fcssescape ); -}; - -Sizzle.error = function( msg ) { - throw new Error( "Syntax error, unrecognized expression: " + msg ); -}; - -/** - * Document sorting and removing duplicates - * @param {ArrayLike} results - */ -Sizzle.uniqueSort = function( results ) { - var elem, - duplicates = [], - j = 0, - i = 0; - - // Unless we *know* we can detect duplicates, assume their presence - hasDuplicate = !support.detectDuplicates; - sortInput = !support.sortStable && results.slice( 0 ); - results.sort( sortOrder ); - - if ( hasDuplicate ) { - while ( (elem = results[i++]) ) { - if ( elem === results[ i ] ) { - j = duplicates.push( i ); - } - } - while ( j-- ) { - results.splice( duplicates[ j ], 1 ); - } - } - - // Clear input after sorting to release objects - // See https://github.com/jquery/sizzle/pull/225 - sortInput = null; - - return results; -}; - -/** - * Utility function for retrieving the text value of an array of DOM nodes - * @param {Array|Element} elem - */ -getText = Sizzle.getText = function( elem ) { - var node, - ret = "", - i = 0, - nodeType = elem.nodeType; - - if ( !nodeType ) { - // If no nodeType, this is expected to be an array - while ( (node = elem[i++]) ) { - // Do not traverse comment nodes - ret += getText( node ); - } - } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { - // Use textContent for elements - // innerText usage removed for 
consistency of new lines (jQuery #11153) - if ( typeof elem.textContent === "string" ) { - return elem.textContent; - } else { - // Traverse its children - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - ret += getText( elem ); - } - } - } else if ( nodeType === 3 || nodeType === 4 ) { - return elem.nodeValue; - } - // Do not include comment or processing instruction nodes - - return ret; -}; - -Expr = Sizzle.selectors = { - - // Can be adjusted by the user - cacheLength: 50, - - createPseudo: markFunction, - - match: matchExpr, - - attrHandle: {}, - - find: {}, - - relative: { - ">": { dir: "parentNode", first: true }, - " ": { dir: "parentNode" }, - "+": { dir: "previousSibling", first: true }, - "~": { dir: "previousSibling" } - }, - - preFilter: { - "ATTR": function( match ) { - match[1] = match[1].replace( runescape, funescape ); - - // Move the given value to match[3] whether quoted or unquoted - match[3] = ( match[3] || match[4] || match[5] || "" ).replace( runescape, funescape ); - - if ( match[2] === "~=" ) { - match[3] = " " + match[3] + " "; - } - - return match.slice( 0, 4 ); - }, - - "CHILD": function( match ) { - /* matches from matchExpr["CHILD"] - 1 type (only|nth|...) - 2 what (child|of-type) - 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) - 4 xn-component of xn+y argument ([+-]?\d*n|) - 5 sign of xn-component - 6 x of xn-component - 7 sign of y-component - 8 y of y-component - */ - match[1] = match[1].toLowerCase(); - - if ( match[1].slice( 0, 3 ) === "nth" ) { - // nth-* requires argument - if ( !match[3] ) { - Sizzle.error( match[0] ); - } - - // numeric x and y parameters for Expr.filter.CHILD - // remember that false/true cast respectively to 0/1 - match[4] = +( match[4] ? 
match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) ); - match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" ); - - // other types prohibit arguments - } else if ( match[3] ) { - Sizzle.error( match[0] ); - } - - return match; - }, - - "PSEUDO": function( match ) { - var excess, - unquoted = !match[6] && match[2]; - - if ( matchExpr["CHILD"].test( match[0] ) ) { - return null; - } - - // Accept quoted arguments as-is - if ( match[3] ) { - match[2] = match[4] || match[5] || ""; - - // Strip excess characters from unquoted arguments - } else if ( unquoted && rpseudo.test( unquoted ) && - // Get excess from tokenize (recursively) - (excess = tokenize( unquoted, true )) && - // advance to the next closing parenthesis - (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) { - - // excess is a negative index - match[0] = match[0].slice( 0, excess ); - match[2] = unquoted.slice( 0, excess ); - } - - // Return only captures needed by the pseudo filter method (type and argument) - return match.slice( 0, 3 ); - } - }, - - filter: { - - "TAG": function( nodeNameSelector ) { - var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); - return nodeNameSelector === "*" ? 
- function() { return true; } : - function( elem ) { - return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; - }; - }, - - "CLASS": function( className ) { - var pattern = classCache[ className + " " ]; - - return pattern || - (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) && - classCache( className, function( elem ) { - return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== "undefined" && elem.getAttribute("class") || "" ); - }); - }, - - "ATTR": function( name, operator, check ) { - return function( elem ) { - var result = Sizzle.attr( elem, name ); - - if ( result == null ) { - return operator === "!="; - } - if ( !operator ) { - return true; - } - - result += ""; - - return operator === "=" ? result === check : - operator === "!=" ? result !== check : - operator === "^=" ? check && result.indexOf( check ) === 0 : - operator === "*=" ? check && result.indexOf( check ) > -1 : - operator === "$=" ? check && result.slice( -check.length ) === check : - operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 : - operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : - false; - }; - }, - - "CHILD": function( type, what, argument, first, last ) { - var simple = type.slice( 0, 3 ) !== "nth", - forward = type.slice( -4 ) !== "last", - ofType = what === "of-type"; - - return first === 1 && last === 0 ? - - // Shortcut for :nth-*(n) - function( elem ) { - return !!elem.parentNode; - } : - - function( elem, context, xml ) { - var cache, uniqueCache, outerCache, node, nodeIndex, start, - dir = simple !== forward ? 
"nextSibling" : "previousSibling", - parent = elem.parentNode, - name = ofType && elem.nodeName.toLowerCase(), - useCache = !xml && !ofType, - diff = false; - - if ( parent ) { - - // :(first|last|only)-(child|of-type) - if ( simple ) { - while ( dir ) { - node = elem; - while ( (node = node[ dir ]) ) { - if ( ofType ? - node.nodeName.toLowerCase() === name : - node.nodeType === 1 ) { - - return false; - } - } - // Reverse direction for :only-* (if we haven't yet done so) - start = dir = type === "only" && !start && "nextSibling"; - } - return true; - } - - start = [ forward ? parent.firstChild : parent.lastChild ]; - - // non-xml :nth-child(...) stores cache data on `parent` - if ( forward && useCache ) { - - // Seek `elem` from a previously-cached index - - // ...in a gzip-friendly way - node = parent; - outerCache = node[ expando ] || (node[ expando ] = {}); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ node.uniqueID ] || - (outerCache[ node.uniqueID ] = {}); - - cache = uniqueCache[ type ] || []; - nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; - diff = nodeIndex && cache[ 2 ]; - node = nodeIndex && parent.childNodes[ nodeIndex ]; - - while ( (node = ++nodeIndex && node && node[ dir ] || - - // Fallback to seeking `elem` from the start - (diff = nodeIndex = 0) || start.pop()) ) { - - // When found, cache indexes on `parent` and break - if ( node.nodeType === 1 && ++diff && node === elem ) { - uniqueCache[ type ] = [ dirruns, nodeIndex, diff ]; - break; - } - } - - } else { - // Use previously-cached element index if available - if ( useCache ) { - // ...in a gzip-friendly way - node = elem; - outerCache = node[ expando ] || (node[ expando ] = {}); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ node.uniqueID ] || - (outerCache[ node.uniqueID ] = {}); - - cache = uniqueCache[ type ] || []; - nodeIndex = cache[ 0 ] === dirruns && 
cache[ 1 ]; - diff = nodeIndex; - } - - // xml :nth-child(...) - // or :nth-last-child(...) or :nth(-last)?-of-type(...) - if ( diff === false ) { - // Use the same loop as above to seek `elem` from the start - while ( (node = ++nodeIndex && node && node[ dir ] || - (diff = nodeIndex = 0) || start.pop()) ) { - - if ( ( ofType ? - node.nodeName.toLowerCase() === name : - node.nodeType === 1 ) && - ++diff ) { - - // Cache the index of each encountered element - if ( useCache ) { - outerCache = node[ expando ] || (node[ expando ] = {}); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ node.uniqueID ] || - (outerCache[ node.uniqueID ] = {}); - - uniqueCache[ type ] = [ dirruns, diff ]; - } - - if ( node === elem ) { - break; - } - } - } - } - } - - // Incorporate the offset, then check against cycle size - diff -= last; - return diff === first || ( diff % first === 0 && diff / first >= 0 ); - } - }; - }, - - "PSEUDO": function( pseudo, argument ) { - // pseudo-class names are case-insensitive - // http://www.w3.org/TR/selectors/#pseudo-classes - // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters - // Remember that setFilters inherits from pseudos - var args, - fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || - Sizzle.error( "unsupported pseudo: " + pseudo ); - - // The user may use createPseudo to indicate that - // arguments are needed to create the filter function - // just as Sizzle does - if ( fn[ expando ] ) { - return fn( argument ); - } - - // But maintain support for old signatures - if ( fn.length > 1 ) { - args = [ pseudo, pseudo, "", argument ]; - return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
- markFunction(function( seed, matches ) { - var idx, - matched = fn( seed, argument ), - i = matched.length; - while ( i-- ) { - idx = indexOf( seed, matched[i] ); - seed[ idx ] = !( matches[ idx ] = matched[i] ); - } - }) : - function( elem ) { - return fn( elem, 0, args ); - }; - } - - return fn; - } - }, - - pseudos: { - // Potentially complex pseudos - "not": markFunction(function( selector ) { - // Trim the selector passed to compile - // to avoid treating leading and trailing - // spaces as combinators - var input = [], - results = [], - matcher = compile( selector.replace( rtrim, "$1" ) ); - - return matcher[ expando ] ? - markFunction(function( seed, matches, context, xml ) { - var elem, - unmatched = matcher( seed, null, xml, [] ), - i = seed.length; - - // Match elements unmatched by `matcher` - while ( i-- ) { - if ( (elem = unmatched[i]) ) { - seed[i] = !(matches[i] = elem); - } - } - }) : - function( elem, context, xml ) { - input[0] = elem; - matcher( input, null, xml, results ); - // Don't keep the element (issue #299) - input[0] = null; - return !results.pop(); - }; - }), - - "has": markFunction(function( selector ) { - return function( elem ) { - return Sizzle( selector, elem ).length > 0; - }; - }), - - "contains": markFunction(function( text ) { - text = text.replace( runescape, funescape ); - return function( elem ) { - return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1; - }; - }), - - // "Whether an element is represented by a :lang() selector - // is based solely on the element's language value - // being equal to the identifier C, - // or beginning with the identifier C immediately followed by "-". - // The matching of C against the element's language value is performed case-insensitively. - // The identifier C does not have to be a valid language name." 
- // http://www.w3.org/TR/selectors/#lang-pseudo - "lang": markFunction( function( lang ) { - // lang value must be a valid identifier - if ( !ridentifier.test(lang || "") ) { - Sizzle.error( "unsupported lang: " + lang ); - } - lang = lang.replace( runescape, funescape ).toLowerCase(); - return function( elem ) { - var elemLang; - do { - if ( (elemLang = documentIsHTML ? - elem.lang : - elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) { - - elemLang = elemLang.toLowerCase(); - return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; - } - } while ( (elem = elem.parentNode) && elem.nodeType === 1 ); - return false; - }; - }), - - // Miscellaneous - "target": function( elem ) { - var hash = window.location && window.location.hash; - return hash && hash.slice( 1 ) === elem.id; - }, - - "root": function( elem ) { - return elem === docElem; - }, - - "focus": function( elem ) { - return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex); - }, - - // Boolean properties - "enabled": createDisabledPseudo( false ), - "disabled": createDisabledPseudo( true ), - - "checked": function( elem ) { - // In CSS3, :checked should return both checked and selected elements - // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - var nodeName = elem.nodeName.toLowerCase(); - return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected); - }, - - "selected": function( elem ) { - // Accessing this property makes selected-by-default - // options in Safari work properly - if ( elem.parentNode ) { - elem.parentNode.selectedIndex; - } - - return elem.selected === true; - }, - - // Contents - "empty": function( elem ) { - // http://www.w3.org/TR/selectors/#empty-pseudo - // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), - // but not by others (comment: 8; processing instruction: 7; etc.) 
- // nodeType < 6 works because attributes (2) do not appear as children - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - if ( elem.nodeType < 6 ) { - return false; - } - } - return true; - }, - - "parent": function( elem ) { - return !Expr.pseudos["empty"]( elem ); - }, - - // Element/input types - "header": function( elem ) { - return rheader.test( elem.nodeName ); - }, - - "input": function( elem ) { - return rinputs.test( elem.nodeName ); - }, - - "button": function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === "button" || name === "button"; - }, - - "text": function( elem ) { - var attr; - return elem.nodeName.toLowerCase() === "input" && - elem.type === "text" && - - // Support: IE<8 - // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" - ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === "text" ); - }, - - // Position-in-collection - "first": createPositionalPseudo(function() { - return [ 0 ]; - }), - - "last": createPositionalPseudo(function( matchIndexes, length ) { - return [ length - 1 ]; - }), - - "eq": createPositionalPseudo(function( matchIndexes, length, argument ) { - return [ argument < 0 ? argument + length : argument ]; - }), - - "even": createPositionalPseudo(function( matchIndexes, length ) { - var i = 0; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "odd": createPositionalPseudo(function( matchIndexes, length ) { - var i = 1; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "lt": createPositionalPseudo(function( matchIndexes, length, argument ) { - var i = argument < 0 ? argument + length : argument; - for ( ; --i >= 0; ) { - matchIndexes.push( i ); - } - return matchIndexes; - }), - - "gt": createPositionalPseudo(function( matchIndexes, length, argument ) { - var i = argument < 0 ? 
argument + length : argument; - for ( ; ++i < length; ) { - matchIndexes.push( i ); - } - return matchIndexes; - }) - } -}; - -Expr.pseudos["nth"] = Expr.pseudos["eq"]; - -// Add button/input type pseudos -for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { - Expr.pseudos[ i ] = createInputPseudo( i ); -} -for ( i in { submit: true, reset: true } ) { - Expr.pseudos[ i ] = createButtonPseudo( i ); -} - -// Easy API for creating new setFilters -function setFilters() {} -setFilters.prototype = Expr.filters = Expr.pseudos; -Expr.setFilters = new setFilters(); - -tokenize = Sizzle.tokenize = function( selector, parseOnly ) { - var matched, match, tokens, type, - soFar, groups, preFilters, - cached = tokenCache[ selector + " " ]; - - if ( cached ) { - return parseOnly ? 0 : cached.slice( 0 ); - } - - soFar = selector; - groups = []; - preFilters = Expr.preFilter; - - while ( soFar ) { - - // Comma and first run - if ( !matched || (match = rcomma.exec( soFar )) ) { - if ( match ) { - // Don't consume trailing commas as valid - soFar = soFar.slice( match[0].length ) || soFar; - } - groups.push( (tokens = []) ); - } - - matched = false; - - // Combinators - if ( (match = rcombinators.exec( soFar )) ) { - matched = match.shift(); - tokens.push({ - value: matched, - // Cast descendant combinators to space - type: match[0].replace( rtrim, " " ) - }); - soFar = soFar.slice( matched.length ); - } - - // Filters - for ( type in Expr.filter ) { - if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] || - (match = preFilters[ type ]( match ))) ) { - matched = match.shift(); - tokens.push({ - value: matched, - type: type, - matches: match - }); - soFar = soFar.slice( matched.length ); - } - } - - if ( !matched ) { - break; - } - } - - // Return the length of the invalid excess - // if we're just parsing - // Otherwise, throw an error or return tokens - return parseOnly ? - soFar.length : - soFar ? 
- Sizzle.error( selector ) : - // Cache the tokens - tokenCache( selector, groups ).slice( 0 ); -}; - -function toSelector( tokens ) { - var i = 0, - len = tokens.length, - selector = ""; - for ( ; i < len; i++ ) { - selector += tokens[i].value; - } - return selector; -} - -function addCombinator( matcher, combinator, base ) { - var dir = combinator.dir, - skip = combinator.next, - key = skip || dir, - checkNonElements = base && key === "parentNode", - doneName = done++; - - return combinator.first ? - // Check against closest ancestor/preceding element - function( elem, context, xml ) { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - return matcher( elem, context, xml ); - } - } - return false; - } : - - // Check against all ancestor/preceding elements - function( elem, context, xml ) { - var oldCache, uniqueCache, outerCache, - newCache = [ dirruns, doneName ]; - - // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching - if ( xml ) { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - if ( matcher( elem, context, xml ) ) { - return true; - } - } - } - } else { - while ( (elem = elem[ dir ]) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - outerCache = elem[ expando ] || (elem[ expando ] = {}); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ elem.uniqueID ] || (outerCache[ elem.uniqueID ] = {}); - - if ( skip && skip === elem.nodeName.toLowerCase() ) { - elem = elem[ dir ] || elem; - } else if ( (oldCache = uniqueCache[ key ]) && - oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { - - // Assign to newCache so results back-propagate to previous elements - return (newCache[ 2 ] = oldCache[ 2 ]); - } else { - // Reuse newcache so results back-propagate to previous elements - uniqueCache[ key ] = newCache; - - // A match means we're done; a fail means we have to keep checking - 
if ( (newCache[ 2 ] = matcher( elem, context, xml )) ) { - return true; - } - } - } - } - } - return false; - }; -} - -function elementMatcher( matchers ) { - return matchers.length > 1 ? - function( elem, context, xml ) { - var i = matchers.length; - while ( i-- ) { - if ( !matchers[i]( elem, context, xml ) ) { - return false; - } - } - return true; - } : - matchers[0]; -} - -function multipleContexts( selector, contexts, results ) { - var i = 0, - len = contexts.length; - for ( ; i < len; i++ ) { - Sizzle( selector, contexts[i], results ); - } - return results; -} - -function condense( unmatched, map, filter, context, xml ) { - var elem, - newUnmatched = [], - i = 0, - len = unmatched.length, - mapped = map != null; - - for ( ; i < len; i++ ) { - if ( (elem = unmatched[i]) ) { - if ( !filter || filter( elem, context, xml ) ) { - newUnmatched.push( elem ); - if ( mapped ) { - map.push( i ); - } - } - } - } - - return newUnmatched; -} - -function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { - if ( postFilter && !postFilter[ expando ] ) { - postFilter = setMatcher( postFilter ); - } - if ( postFinder && !postFinder[ expando ] ) { - postFinder = setMatcher( postFinder, postSelector ); - } - return markFunction(function( seed, results, context, xml ) { - var temp, i, elem, - preMap = [], - postMap = [], - preexisting = results.length, - - // Get initial elements from seed or context - elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ), - - // Prefilter to get matcher input, preserving a map for seed-results synchronization - matcherIn = preFilter && ( seed || !selector ) ? - condense( elems, preMap, preFilter, context, xml ) : - elems, - - matcherOut = matcher ? - // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, - postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
- - // ...intermediate processing is necessary - [] : - - // ...otherwise use results directly - results : - matcherIn; - - // Find primary matches - if ( matcher ) { - matcher( matcherIn, matcherOut, context, xml ); - } - - // Apply postFilter - if ( postFilter ) { - temp = condense( matcherOut, postMap ); - postFilter( temp, [], context, xml ); - - // Un-match failing elements by moving them back to matcherIn - i = temp.length; - while ( i-- ) { - if ( (elem = temp[i]) ) { - matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem); - } - } - } - - if ( seed ) { - if ( postFinder || preFilter ) { - if ( postFinder ) { - // Get the final matcherOut by condensing this intermediate into postFinder contexts - temp = []; - i = matcherOut.length; - while ( i-- ) { - if ( (elem = matcherOut[i]) ) { - // Restore matcherIn since elem is not yet a final match - temp.push( (matcherIn[i] = elem) ); - } - } - postFinder( null, (matcherOut = []), temp, xml ); - } - - // Move matched elements from seed to results to keep them synchronized - i = matcherOut.length; - while ( i-- ) { - if ( (elem = matcherOut[i]) && - (temp = postFinder ? indexOf( seed, elem ) : preMap[i]) > -1 ) { - - seed[temp] = !(results[temp] = elem); - } - } - } - - // Add elements to results, through postFinder if defined - } else { - matcherOut = condense( - matcherOut === results ? - matcherOut.splice( preexisting, matcherOut.length ) : - matcherOut - ); - if ( postFinder ) { - postFinder( null, results, matcherOut, xml ); - } else { - push.apply( results, matcherOut ); - } - } - }); -} - -function matcherFromTokens( tokens ) { - var checkContext, matcher, j, - len = tokens.length, - leadingRelative = Expr.relative[ tokens[0].type ], - implicitRelative = leadingRelative || Expr.relative[" "], - i = leadingRelative ? 
1 : 0, - - // The foundational matcher ensures that elements are reachable from top-level context(s) - matchContext = addCombinator( function( elem ) { - return elem === checkContext; - }, implicitRelative, true ), - matchAnyContext = addCombinator( function( elem ) { - return indexOf( checkContext, elem ) > -1; - }, implicitRelative, true ), - matchers = [ function( elem, context, xml ) { - var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( - (checkContext = context).nodeType ? - matchContext( elem, context, xml ) : - matchAnyContext( elem, context, xml ) ); - // Avoid hanging onto element (issue #299) - checkContext = null; - return ret; - } ]; - - for ( ; i < len; i++ ) { - if ( (matcher = Expr.relative[ tokens[i].type ]) ) { - matchers = [ addCombinator(elementMatcher( matchers ), matcher) ]; - } else { - matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches ); - - // Return special upon seeing a positional matcher - if ( matcher[ expando ] ) { - // Find the next relative operator (if any) for proper handling - j = ++i; - for ( ; j < len; j++ ) { - if ( Expr.relative[ tokens[j].type ] ) { - break; - } - } - return setMatcher( - i > 1 && elementMatcher( matchers ), - i > 1 && toSelector( - // If the preceding token was a descendant combinator, insert an implicit any-element `*` - tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? 
"*" : "" }) - ).replace( rtrim, "$1" ), - matcher, - i < j && matcherFromTokens( tokens.slice( i, j ) ), - j < len && matcherFromTokens( (tokens = tokens.slice( j )) ), - j < len && toSelector( tokens ) - ); - } - matchers.push( matcher ); - } - } - - return elementMatcher( matchers ); -} - -function matcherFromGroupMatchers( elementMatchers, setMatchers ) { - var bySet = setMatchers.length > 0, - byElement = elementMatchers.length > 0, - superMatcher = function( seed, context, xml, results, outermost ) { - var elem, j, matcher, - matchedCount = 0, - i = "0", - unmatched = seed && [], - setMatched = [], - contextBackup = outermostContext, - // We must always have either seed elements or outermost context - elems = seed || byElement && Expr.find["TAG"]( "*", outermost ), - // Use integer dirruns iff this is the outermost matcher - dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1), - len = elems.length; - - if ( outermost ) { - outermostContext = context === document || context || outermost; - } - - // Add elements passing elementMatchers directly to results - // Support: IE<9, Safari - // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id - for ( ; i !== len && (elem = elems[i]) != null; i++ ) { - if ( byElement && elem ) { - j = 0; - if ( !context && elem.ownerDocument !== document ) { - setDocument( elem ); - xml = !documentIsHTML; - } - while ( (matcher = elementMatchers[j++]) ) { - if ( matcher( elem, context || document, xml) ) { - results.push( elem ); - break; - } - } - if ( outermost ) { - dirruns = dirrunsUnique; - } - } - - // Track unmatched elements for set filters - if ( bySet ) { - // They will have gone through all possible matchers - if ( (elem = !matcher && elem) ) { - matchedCount--; - } - - // Lengthen the array for every element, matched or not - if ( seed ) { - unmatched.push( elem ); - } - } - } - - // `i` is now the count of elements visited above, and adding it to `matchedCount` - // 
makes the latter nonnegative. - matchedCount += i; - - // Apply set filters to unmatched elements - // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount` - // equals `i`), unless we didn't visit _any_ elements in the above loop because we have - // no element matchers and no seed. - // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that - // case, which will result in a "00" `matchedCount` that differs from `i` but is also - // numerically zero. - if ( bySet && i !== matchedCount ) { - j = 0; - while ( (matcher = setMatchers[j++]) ) { - matcher( unmatched, setMatched, context, xml ); - } - - if ( seed ) { - // Reintegrate element matches to eliminate the need for sorting - if ( matchedCount > 0 ) { - while ( i-- ) { - if ( !(unmatched[i] || setMatched[i]) ) { - setMatched[i] = pop.call( results ); - } - } - } - - // Discard index placeholder values to get only actual matches - setMatched = condense( setMatched ); - } - - // Add matches to results - push.apply( results, setMatched ); - - // Seedless set matches succeeding multiple successful matchers stipulate sorting - if ( outermost && !seed && setMatched.length > 0 && - ( matchedCount + setMatchers.length ) > 1 ) { - - Sizzle.uniqueSort( results ); - } - } - - // Override manipulation of globals by nested matchers - if ( outermost ) { - dirruns = dirrunsUnique; - outermostContext = contextBackup; - } - - return unmatched; - }; - - return bySet ? 
- markFunction( superMatcher ) : - superMatcher; -} - -compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) { - var i, - setMatchers = [], - elementMatchers = [], - cached = compilerCache[ selector + " " ]; - - if ( !cached ) { - // Generate a function of recursive functions that can be used to check each element - if ( !match ) { - match = tokenize( selector ); - } - i = match.length; - while ( i-- ) { - cached = matcherFromTokens( match[i] ); - if ( cached[ expando ] ) { - setMatchers.push( cached ); - } else { - elementMatchers.push( cached ); - } - } - - // Cache the compiled function - cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) ); - - // Save selector and tokenization - cached.selector = selector; - } - return cached; -}; - -/** - * A low-level selection function that works with Sizzle's compiled - * selector functions - * @param {String|Function} selector A selector or a pre-compiled - * selector function built with Sizzle.compile - * @param {Element} context - * @param {Array} [results] - * @param {Array} [seed] A set of elements to match against - */ -select = Sizzle.select = function( selector, context, results, seed ) { - var i, tokens, token, type, find, - compiled = typeof selector === "function" && selector, - match = !seed && tokenize( (selector = compiled.selector || selector) ); - - results = results || []; - - // Try to minimize operations if there is only one selector in the list and no seed - // (the latter of which guarantees us context) - if ( match.length === 1 ) { - - // Reduce context if the leading compound selector is an ID - tokens = match[0] = match[0].slice( 0 ); - if ( tokens.length > 2 && (token = tokens[0]).type === "ID" && - context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[1].type ] ) { - - context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0]; - if ( !context ) { - return results; - - // Precompiled 
matchers will still verify ancestry, so step up a level - } else if ( compiled ) { - context = context.parentNode; - } - - selector = selector.slice( tokens.shift().value.length ); - } - - // Fetch a seed set for right-to-left matching - i = matchExpr["needsContext"].test( selector ) ? 0 : tokens.length; - while ( i-- ) { - token = tokens[i]; - - // Abort if we hit a combinator - if ( Expr.relative[ (type = token.type) ] ) { - break; - } - if ( (find = Expr.find[ type ]) ) { - // Search, expanding context for leading sibling combinators - if ( (seed = find( - token.matches[0].replace( runescape, funescape ), - rsibling.test( tokens[0].type ) && testContext( context.parentNode ) || context - )) ) { - - // If seed is empty or no tokens remain, we can return early - tokens.splice( i, 1 ); - selector = seed.length && toSelector( tokens ); - if ( !selector ) { - push.apply( results, seed ); - return results; - } - - break; - } - } - } - } - - // Compile and execute a filtering function if one is not provided - // Provide `match` to avoid retokenization if we modified the selector above - ( compiled || compile( selector, match ) )( - seed, - context, - !documentIsHTML, - results, - !context || rsibling.test( selector ) && testContext( context.parentNode ) || context - ); - return results; -}; - -// One-time assignments - -// Sort stability -support.sortStable = expando.split("").sort( sortOrder ).join("") === expando; - -// Support: Chrome 14-35+ -// Always assume duplicates if they aren't passed to the comparison function -support.detectDuplicates = !!hasDuplicate; - -// Initialize against the default document -setDocument(); - -// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27) -// Detached nodes confoundingly follow *each other* -support.sortDetached = assert(function( el ) { - // Should return 1, but returns 4 (following) - return el.compareDocumentPosition( document.createElement("fieldset") ) & 1; -}); - -// Support: IE<8 -// Prevent 
attribute/property "interpolation" -// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx -if ( !assert(function( el ) { - el.innerHTML = ""; - return el.firstChild.getAttribute("href") === "#" ; -}) ) { - addHandle( "type|href|height|width", function( elem, name, isXML ) { - if ( !isXML ) { - return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 ); - } - }); -} - -// Support: IE<9 -// Use defaultValue in place of getAttribute("value") -if ( !support.attributes || !assert(function( el ) { - el.innerHTML = ""; - el.firstChild.setAttribute( "value", "" ); - return el.firstChild.getAttribute( "value" ) === ""; -}) ) { - addHandle( "value", function( elem, name, isXML ) { - if ( !isXML && elem.nodeName.toLowerCase() === "input" ) { - return elem.defaultValue; - } - }); -} - -// Support: IE<9 -// Use getAttributeNode to fetch booleans when getAttribute lies -if ( !assert(function( el ) { - return el.getAttribute("disabled") == null; -}) ) { - addHandle( booleans, function( elem, name, isXML ) { - var val; - if ( !isXML ) { - return elem[ name ] === true ? name.toLowerCase() : - (val = elem.getAttributeNode( name )) && val.specified ? 
- val.value : - null; - } - }); -} - -return Sizzle; - -})( window ); - - - -jQuery.find = Sizzle; -jQuery.expr = Sizzle.selectors; - -// Deprecated -jQuery.expr[ ":" ] = jQuery.expr.pseudos; -jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; -jQuery.text = Sizzle.getText; -jQuery.isXMLDoc = Sizzle.isXML; -jQuery.contains = Sizzle.contains; -jQuery.escapeSelector = Sizzle.escape; - - - - -var dir = function( elem, dir, until ) { - var matched = [], - truncate = until !== undefined; - - while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { - if ( elem.nodeType === 1 ) { - if ( truncate && jQuery( elem ).is( until ) ) { - break; - } - matched.push( elem ); - } - } - return matched; -}; - - -var siblings = function( n, elem ) { - var matched = []; - - for ( ; n; n = n.nextSibling ) { - if ( n.nodeType === 1 && n !== elem ) { - matched.push( n ); - } - } - - return matched; -}; - - -var rneedsContext = jQuery.expr.match.needsContext; - - - -function nodeName( elem, name ) { - - return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); - -}; -var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); - - - -var risSimple = /^.[^:#\[\.,]*$/; - -// Implement the identical functionality for filter and not -function winnow( elements, qualifier, not ) { - if ( jQuery.isFunction( qualifier ) ) { - return jQuery.grep( elements, function( elem, i ) { - return !!qualifier.call( elem, i, elem ) !== not; - } ); - } - - // Single element - if ( qualifier.nodeType ) { - return jQuery.grep( elements, function( elem ) { - return ( elem === qualifier ) !== not; - } ); - } - - // Arraylike of elements (jQuery, arguments, Array) - if ( typeof qualifier !== "string" ) { - return jQuery.grep( elements, function( elem ) { - return ( indexOf.call( qualifier, elem ) > -1 ) !== not; - } ); - } - - // Simple selector that can be filtered directly, removing non-Elements - if ( risSimple.test( qualifier ) ) { - return jQuery.filter( 
qualifier, elements, not ); - } - - // Complex selector, compare the two sets, removing non-Elements - qualifier = jQuery.filter( qualifier, elements ); - return jQuery.grep( elements, function( elem ) { - return ( indexOf.call( qualifier, elem ) > -1 ) !== not && elem.nodeType === 1; - } ); -} - -jQuery.filter = function( expr, elems, not ) { - var elem = elems[ 0 ]; - - if ( not ) { - expr = ":not(" + expr + ")"; - } - - if ( elems.length === 1 && elem.nodeType === 1 ) { - return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; - } - - return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { - return elem.nodeType === 1; - } ) ); -}; - -jQuery.fn.extend( { - find: function( selector ) { - var i, ret, - len = this.length, - self = this; - - if ( typeof selector !== "string" ) { - return this.pushStack( jQuery( selector ).filter( function() { - for ( i = 0; i < len; i++ ) { - if ( jQuery.contains( self[ i ], this ) ) { - return true; - } - } - } ) ); - } - - ret = this.pushStack( [] ); - - for ( i = 0; i < len; i++ ) { - jQuery.find( selector, self[ i ], ret ); - } - - return len > 1 ? jQuery.uniqueSort( ret ) : ret; - }, - filter: function( selector ) { - return this.pushStack( winnow( this, selector || [], false ) ); - }, - not: function( selector ) { - return this.pushStack( winnow( this, selector || [], true ) ); - }, - is: function( selector ) { - return !!winnow( - this, - - // If this is a positional/relative selector, check membership in the returned set - // so $("p:first").is("p:last") won't return true for a doc with two "p". - typeof selector === "string" && rneedsContext.test( selector ) ? 
- jQuery( selector ) : - selector || [], - false - ).length; - } -} ); - - -// Initialize a jQuery object - - -// A central reference to the root jQuery(document) -var rootjQuery, - - // A simple way to check for HTML strings - // Prioritize #id over to avoid XSS via location.hash (#9521) - // Strict HTML recognition (#11290: must start with <) - // Shortcut simple #id case for speed - rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, - - init = jQuery.fn.init = function( selector, context, root ) { - var match, elem; - - // HANDLE: $(""), $(null), $(undefined), $(false) - if ( !selector ) { - return this; - } - - // Method init() accepts an alternate rootjQuery - // so migrate can support jQuery.sub (gh-2101) - root = root || rootjQuery; - - // Handle HTML strings - if ( typeof selector === "string" ) { - if ( selector[ 0 ] === "<" && - selector[ selector.length - 1 ] === ">" && - selector.length >= 3 ) { - - // Assume that strings that start and end with <> are HTML and skip the regex check - match = [ null, selector, null ]; - - } else { - match = rquickExpr.exec( selector ); - } - - // Match html or make sure no context is specified for #id - if ( match && ( match[ 1 ] || !context ) ) { - - // HANDLE: $(html) -> $(array) - if ( match[ 1 ] ) { - context = context instanceof jQuery ? context[ 0 ] : context; - - // Option to run scripts is true for back-compat - // Intentionally let the error be thrown if parseHTML is not present - jQuery.merge( this, jQuery.parseHTML( - match[ 1 ], - context && context.nodeType ? 
context.ownerDocument || context : document, - true - ) ); - - // HANDLE: $(html, props) - if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { - for ( match in context ) { - - // Properties of context are called as methods if possible - if ( jQuery.isFunction( this[ match ] ) ) { - this[ match ]( context[ match ] ); - - // ...and otherwise set as attributes - } else { - this.attr( match, context[ match ] ); - } - } - } - - return this; - - // HANDLE: $(#id) - } else { - elem = document.getElementById( match[ 2 ] ); - - if ( elem ) { - - // Inject the element directly into the jQuery object - this[ 0 ] = elem; - this.length = 1; - } - return this; - } - - // HANDLE: $(expr, $(...)) - } else if ( !context || context.jquery ) { - return ( context || root ).find( selector ); - - // HANDLE: $(expr, context) - // (which is just equivalent to: $(context).find(expr) - } else { - return this.constructor( context ).find( selector ); - } - - // HANDLE: $(DOMElement) - } else if ( selector.nodeType ) { - this[ 0 ] = selector; - this.length = 1; - return this; - - // HANDLE: $(function) - // Shortcut for document ready - } else if ( jQuery.isFunction( selector ) ) { - return root.ready !== undefined ? 
- root.ready( selector ) : - - // Execute immediately if ready is not present - selector( jQuery ); - } - - return jQuery.makeArray( selector, this ); - }; - -// Give the init function the jQuery prototype for later instantiation -init.prototype = jQuery.fn; - -// Initialize central reference -rootjQuery = jQuery( document ); - - -var rparentsprev = /^(?:parents|prev(?:Until|All))/, - - // Methods guaranteed to produce a unique set when starting from a unique set - guaranteedUnique = { - children: true, - contents: true, - next: true, - prev: true - }; - -jQuery.fn.extend( { - has: function( target ) { - var targets = jQuery( target, this ), - l = targets.length; - - return this.filter( function() { - var i = 0; - for ( ; i < l; i++ ) { - if ( jQuery.contains( this, targets[ i ] ) ) { - return true; - } - } - } ); - }, - - closest: function( selectors, context ) { - var cur, - i = 0, - l = this.length, - matched = [], - targets = typeof selectors !== "string" && jQuery( selectors ); - - // Positional selectors never match, since there's no _selection_ context - if ( !rneedsContext.test( selectors ) ) { - for ( ; i < l; i++ ) { - for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { - - // Always skip document fragments - if ( cur.nodeType < 11 && ( targets ? - targets.index( cur ) > -1 : - - // Don't pass non-elements to Sizzle - cur.nodeType === 1 && - jQuery.find.matchesSelector( cur, selectors ) ) ) { - - matched.push( cur ); - break; - } - } - } - } - - return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); - }, - - // Determine the position of an element within the set - index: function( elem ) { - - // No argument, return index in parent - if ( !elem ) { - return ( this[ 0 ] && this[ 0 ].parentNode ) ? 
this.first().prevAll().length : -1; - } - - // Index in selector - if ( typeof elem === "string" ) { - return indexOf.call( jQuery( elem ), this[ 0 ] ); - } - - // Locate the position of the desired element - return indexOf.call( this, - - // If it receives a jQuery object, the first element is used - elem.jquery ? elem[ 0 ] : elem - ); - }, - - add: function( selector, context ) { - return this.pushStack( - jQuery.uniqueSort( - jQuery.merge( this.get(), jQuery( selector, context ) ) - ) - ); - }, - - addBack: function( selector ) { - return this.add( selector == null ? - this.prevObject : this.prevObject.filter( selector ) - ); - } -} ); - -function sibling( cur, dir ) { - while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} - return cur; -} - -jQuery.each( { - parent: function( elem ) { - var parent = elem.parentNode; - return parent && parent.nodeType !== 11 ? parent : null; - }, - parents: function( elem ) { - return dir( elem, "parentNode" ); - }, - parentsUntil: function( elem, i, until ) { - return dir( elem, "parentNode", until ); - }, - next: function( elem ) { - return sibling( elem, "nextSibling" ); - }, - prev: function( elem ) { - return sibling( elem, "previousSibling" ); - }, - nextAll: function( elem ) { - return dir( elem, "nextSibling" ); - }, - prevAll: function( elem ) { - return dir( elem, "previousSibling" ); - }, - nextUntil: function( elem, i, until ) { - return dir( elem, "nextSibling", until ); - }, - prevUntil: function( elem, i, until ) { - return dir( elem, "previousSibling", until ); - }, - siblings: function( elem ) { - return siblings( ( elem.parentNode || {} ).firstChild, elem ); - }, - children: function( elem ) { - return siblings( elem.firstChild ); - }, - contents: function( elem ) { - if ( nodeName( elem, "iframe" ) ) { - return elem.contentDocument; - } - - // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only - // Treat the template element as a regular one in browsers that - // don't support it. 
- if ( nodeName( elem, "template" ) ) { - elem = elem.content || elem; - } - - return jQuery.merge( [], elem.childNodes ); - } -}, function( name, fn ) { - jQuery.fn[ name ] = function( until, selector ) { - var matched = jQuery.map( this, fn, until ); - - if ( name.slice( -5 ) !== "Until" ) { - selector = until; - } - - if ( selector && typeof selector === "string" ) { - matched = jQuery.filter( selector, matched ); - } - - if ( this.length > 1 ) { - - // Remove duplicates - if ( !guaranteedUnique[ name ] ) { - jQuery.uniqueSort( matched ); - } - - // Reverse order for parents* and prev-derivatives - if ( rparentsprev.test( name ) ) { - matched.reverse(); - } - } - - return this.pushStack( matched ); - }; -} ); -var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g ); - - - -// Convert String-formatted options into Object-formatted ones -function createOptions( options ) { - var object = {}; - jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) { - object[ flag ] = true; - } ); - return object; -} - -/* - * Create a callback list using the following parameters: - * - * options: an optional list of space-separated options that will change how - * the callback list behaves or a more traditional option object - * - * By default a callback list will act like an event callback list and can be - * "fired" multiple times. - * - * Possible options: - * - * once: will ensure the callback list can only be fired once (like a Deferred) - * - * memory: will keep track of previous values and will call any callback added - * after the list has been fired right away with the latest "memorized" - * values (like a Deferred) - * - * unique: will ensure a callback can only be added once (no duplicate in the list) - * - * stopOnFalse: interrupt callings when a callback returns false - * - */ -jQuery.Callbacks = function( options ) { - - // Convert options from String-formatted to Object-formatted if needed - // (we check in cache first) - options = typeof options === "string" ? 
- createOptions( options ) : - jQuery.extend( {}, options ); - - var // Flag to know if list is currently firing - firing, - - // Last fire value for non-forgettable lists - memory, - - // Flag to know if list was already fired - fired, - - // Flag to prevent firing - locked, - - // Actual callback list - list = [], - - // Queue of execution data for repeatable lists - queue = [], - - // Index of currently firing callback (modified by add/remove as needed) - firingIndex = -1, - - // Fire callbacks - fire = function() { - - // Enforce single-firing - locked = locked || options.once; - - // Execute callbacks for all pending executions, - // respecting firingIndex overrides and runtime changes - fired = firing = true; - for ( ; queue.length; firingIndex = -1 ) { - memory = queue.shift(); - while ( ++firingIndex < list.length ) { - - // Run callback and check for early termination - if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && - options.stopOnFalse ) { - - // Jump to end and forget the data so .add doesn't re-fire - firingIndex = list.length; - memory = false; - } - } - } - - // Forget the data if we're done with it - if ( !options.memory ) { - memory = false; - } - - firing = false; - - // Clean up if we're done firing for good - if ( locked ) { - - // Keep an empty list if we have data for future add calls - if ( memory ) { - list = []; - - // Otherwise, this object is spent - } else { - list = ""; - } - } - }, - - // Actual Callbacks object - self = { - - // Add a callback or a collection of callbacks to the list - add: function() { - if ( list ) { - - // If we have memory from a past run, we should fire after adding - if ( memory && !firing ) { - firingIndex = list.length - 1; - queue.push( memory ); - } - - ( function add( args ) { - jQuery.each( args, function( _, arg ) { - if ( jQuery.isFunction( arg ) ) { - if ( !options.unique || !self.has( arg ) ) { - list.push( arg ); - } - } else if ( arg && arg.length && jQuery.type( arg ) !== 
"string" ) { - - // Inspect recursively - add( arg ); - } - } ); - } )( arguments ); - - if ( memory && !firing ) { - fire(); - } - } - return this; - }, - - // Remove a callback from the list - remove: function() { - jQuery.each( arguments, function( _, arg ) { - var index; - while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { - list.splice( index, 1 ); - - // Handle firing indexes - if ( index <= firingIndex ) { - firingIndex--; - } - } - } ); - return this; - }, - - // Check if a given callback is in the list. - // If no argument is given, return whether or not list has callbacks attached. - has: function( fn ) { - return fn ? - jQuery.inArray( fn, list ) > -1 : - list.length > 0; - }, - - // Remove all callbacks from the list - empty: function() { - if ( list ) { - list = []; - } - return this; - }, - - // Disable .fire and .add - // Abort any current/pending executions - // Clear all callbacks and values - disable: function() { - locked = queue = []; - list = memory = ""; - return this; - }, - disabled: function() { - return !list; - }, - - // Disable .fire - // Also disable .add unless we have memory (since it would have no effect) - // Abort any pending executions - lock: function() { - locked = queue = []; - if ( !memory && !firing ) { - list = memory = ""; - } - return this; - }, - locked: function() { - return !!locked; - }, - - // Call all callbacks with the given context and arguments - fireWith: function( context, args ) { - if ( !locked ) { - args = args || []; - args = [ context, args.slice ? 
args.slice() : args ]; - queue.push( args ); - if ( !firing ) { - fire(); - } - } - return this; - }, - - // Call all the callbacks with the given arguments - fire: function() { - self.fireWith( this, arguments ); - return this; - }, - - // To know if the callbacks have already been called at least once - fired: function() { - return !!fired; - } - }; - - return self; -}; - - -function Identity( v ) { - return v; -} -function Thrower( ex ) { - throw ex; -} - -function adoptValue( value, resolve, reject, noValue ) { - var method; - - try { - - // Check for promise aspect first to privilege synchronous behavior - if ( value && jQuery.isFunction( ( method = value.promise ) ) ) { - method.call( value ).done( resolve ).fail( reject ); - - // Other thenables - } else if ( value && jQuery.isFunction( ( method = value.then ) ) ) { - method.call( value, resolve, reject ); - - // Other non-thenables - } else { - - // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: - // * false: [ value ].slice( 0 ) => resolve( value ) - // * true: [ value ].slice( 1 ) => resolve() - resolve.apply( undefined, [ value ].slice( noValue ) ); - } - - // For Promises/A+, convert exceptions into rejections - // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in - // Deferred#then to conditionally suppress rejection. - } catch ( value ) { - - // Support: Android 4.0 only - // Strict mode functions invoked without .call/.apply get global-object context - reject.apply( undefined, [ value ] ); - } -} - -jQuery.extend( { - - Deferred: function( func ) { - var tuples = [ - - // action, add listener, callbacks, - // ... 
.then handlers, argument index, [final state] - [ "notify", "progress", jQuery.Callbacks( "memory" ), - jQuery.Callbacks( "memory" ), 2 ], - [ "resolve", "done", jQuery.Callbacks( "once memory" ), - jQuery.Callbacks( "once memory" ), 0, "resolved" ], - [ "reject", "fail", jQuery.Callbacks( "once memory" ), - jQuery.Callbacks( "once memory" ), 1, "rejected" ] - ], - state = "pending", - promise = { - state: function() { - return state; - }, - always: function() { - deferred.done( arguments ).fail( arguments ); - return this; - }, - "catch": function( fn ) { - return promise.then( null, fn ); - }, - - // Keep pipe for back-compat - pipe: function( /* fnDone, fnFail, fnProgress */ ) { - var fns = arguments; - - return jQuery.Deferred( function( newDefer ) { - jQuery.each( tuples, function( i, tuple ) { - - // Map tuples (progress, done, fail) to arguments (done, fail, progress) - var fn = jQuery.isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; - - // deferred.progress(function() { bind to newDefer or newDefer.notify }) - // deferred.done(function() { bind to newDefer or newDefer.resolve }) - // deferred.fail(function() { bind to newDefer or newDefer.reject }) - deferred[ tuple[ 1 ] ]( function() { - var returned = fn && fn.apply( this, arguments ); - if ( returned && jQuery.isFunction( returned.promise ) ) { - returned.promise() - .progress( newDefer.notify ) - .done( newDefer.resolve ) - .fail( newDefer.reject ); - } else { - newDefer[ tuple[ 0 ] + "With" ]( - this, - fn ? 
[ returned ] : arguments - ); - } - } ); - } ); - fns = null; - } ).promise(); - }, - then: function( onFulfilled, onRejected, onProgress ) { - var maxDepth = 0; - function resolve( depth, deferred, handler, special ) { - return function() { - var that = this, - args = arguments, - mightThrow = function() { - var returned, then; - - // Support: Promises/A+ section 2.3.3.3.3 - // https://promisesaplus.com/#point-59 - // Ignore double-resolution attempts - if ( depth < maxDepth ) { - return; - } - - returned = handler.apply( that, args ); - - // Support: Promises/A+ section 2.3.1 - // https://promisesaplus.com/#point-48 - if ( returned === deferred.promise() ) { - throw new TypeError( "Thenable self-resolution" ); - } - - // Support: Promises/A+ sections 2.3.3.1, 3.5 - // https://promisesaplus.com/#point-54 - // https://promisesaplus.com/#point-75 - // Retrieve `then` only once - then = returned && - - // Support: Promises/A+ section 2.3.4 - // https://promisesaplus.com/#point-64 - // Only check objects and functions for thenability - ( typeof returned === "object" || - typeof returned === "function" ) && - returned.then; - - // Handle a returned thenable - if ( jQuery.isFunction( then ) ) { - - // Special processors (notify) just wait for resolution - if ( special ) { - then.call( - returned, - resolve( maxDepth, deferred, Identity, special ), - resolve( maxDepth, deferred, Thrower, special ) - ); - - // Normal processors (resolve) also hook into progress - } else { - - // ...and disregard older resolution values - maxDepth++; - - then.call( - returned, - resolve( maxDepth, deferred, Identity, special ), - resolve( maxDepth, deferred, Thrower, special ), - resolve( maxDepth, deferred, Identity, - deferred.notifyWith ) - ); - } - - // Handle all other returned values - } else { - - // Only substitute handlers pass on context - // and multiple values (non-spec behavior) - if ( handler !== Identity ) { - that = undefined; - args = [ returned ]; - } - - // Process the 
value(s) - // Default process is resolve - ( special || deferred.resolveWith )( that, args ); - } - }, - - // Only normal processors (resolve) catch and reject exceptions - process = special ? - mightThrow : - function() { - try { - mightThrow(); - } catch ( e ) { - - if ( jQuery.Deferred.exceptionHook ) { - jQuery.Deferred.exceptionHook( e, - process.stackTrace ); - } - - // Support: Promises/A+ section 2.3.3.3.4.1 - // https://promisesaplus.com/#point-61 - // Ignore post-resolution exceptions - if ( depth + 1 >= maxDepth ) { - - // Only substitute handlers pass on context - // and multiple values (non-spec behavior) - if ( handler !== Thrower ) { - that = undefined; - args = [ e ]; - } - - deferred.rejectWith( that, args ); - } - } - }; - - // Support: Promises/A+ section 2.3.3.3.1 - // https://promisesaplus.com/#point-57 - // Re-resolve promises immediately to dodge false rejection from - // subsequent errors - if ( depth ) { - process(); - } else { - - // Call an optional hook to record the stack, in case of exception - // since it's otherwise lost when execution goes async - if ( jQuery.Deferred.getStackHook ) { - process.stackTrace = jQuery.Deferred.getStackHook(); - } - window.setTimeout( process ); - } - }; - } - - return jQuery.Deferred( function( newDefer ) { - - // progress_handlers.add( ... ) - tuples[ 0 ][ 3 ].add( - resolve( - 0, - newDefer, - jQuery.isFunction( onProgress ) ? - onProgress : - Identity, - newDefer.notifyWith - ) - ); - - // fulfilled_handlers.add( ... ) - tuples[ 1 ][ 3 ].add( - resolve( - 0, - newDefer, - jQuery.isFunction( onFulfilled ) ? - onFulfilled : - Identity - ) - ); - - // rejected_handlers.add( ... ) - tuples[ 2 ][ 3 ].add( - resolve( - 0, - newDefer, - jQuery.isFunction( onRejected ) ? - onRejected : - Thrower - ) - ); - } ).promise(); - }, - - // Get a promise for this deferred - // If obj is provided, the promise aspect is added to the object - promise: function( obj ) { - return obj != null ? 
jQuery.extend( obj, promise ) : promise; - } - }, - deferred = {}; - - // Add list-specific methods - jQuery.each( tuples, function( i, tuple ) { - var list = tuple[ 2 ], - stateString = tuple[ 5 ]; - - // promise.progress = list.add - // promise.done = list.add - // promise.fail = list.add - promise[ tuple[ 1 ] ] = list.add; - - // Handle state - if ( stateString ) { - list.add( - function() { - - // state = "resolved" (i.e., fulfilled) - // state = "rejected" - state = stateString; - }, - - // rejected_callbacks.disable - // fulfilled_callbacks.disable - tuples[ 3 - i ][ 2 ].disable, - - // progress_callbacks.lock - tuples[ 0 ][ 2 ].lock - ); - } - - // progress_handlers.fire - // fulfilled_handlers.fire - // rejected_handlers.fire - list.add( tuple[ 3 ].fire ); - - // deferred.notify = function() { deferred.notifyWith(...) } - // deferred.resolve = function() { deferred.resolveWith(...) } - // deferred.reject = function() { deferred.rejectWith(...) } - deferred[ tuple[ 0 ] ] = function() { - deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); - return this; - }; - - // deferred.notifyWith = list.fireWith - // deferred.resolveWith = list.fireWith - // deferred.rejectWith = list.fireWith - deferred[ tuple[ 0 ] + "With" ] = list.fireWith; - } ); - - // Make the deferred a promise - promise.promise( deferred ); - - // Call given func if any - if ( func ) { - func.call( deferred, deferred ); - } - - // All done! 
- return deferred; - }, - - // Deferred helper - when: function( singleValue ) { - var - - // count of uncompleted subordinates - remaining = arguments.length, - - // count of unprocessed arguments - i = remaining, - - // subordinate fulfillment data - resolveContexts = Array( i ), - resolveValues = slice.call( arguments ), - - // the master Deferred - master = jQuery.Deferred(), - - // subordinate callback factory - updateFunc = function( i ) { - return function( value ) { - resolveContexts[ i ] = this; - resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; - if ( !( --remaining ) ) { - master.resolveWith( resolveContexts, resolveValues ); - } - }; - }; - - // Single- and empty arguments are adopted like Promise.resolve - if ( remaining <= 1 ) { - adoptValue( singleValue, master.done( updateFunc( i ) ).resolve, master.reject, - !remaining ); - - // Use .then() to unwrap secondary thenables (cf. gh-3000) - if ( master.state() === "pending" || - jQuery.isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { - - return master.then(); - } - } - - // Multiple arguments are aggregated like Promise.all array elements - while ( i-- ) { - adoptValue( resolveValues[ i ], updateFunc( i ), master.reject ); - } - - return master.promise(); - } -} ); - - -// These usually indicate a programmer mistake during development, -// warn about them ASAP rather than swallowing them by default. 
-var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; - -jQuery.Deferred.exceptionHook = function( error, stack ) { - - // Support: IE 8 - 9 only - // Console exists when dev tools are open, which can happen at any time - if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { - window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); - } -}; - - - - -jQuery.readyException = function( error ) { - window.setTimeout( function() { - throw error; - } ); -}; - - - - -// The deferred used on DOM ready -var readyList = jQuery.Deferred(); - -jQuery.fn.ready = function( fn ) { - - readyList - .then( fn ) - - // Wrap jQuery.readyException in a function so that the lookup - // happens at the time of error handling instead of callback - // registration. - .catch( function( error ) { - jQuery.readyException( error ); - } ); - - return this; -}; - -jQuery.extend( { - - // Is the DOM ready to be used? Set to true once it occurs. - isReady: false, - - // A counter to track how many items to wait for before - // the ready event fires. See #6781 - readyWait: 1, - - // Handle when the DOM is ready - ready: function( wait ) { - - // Abort if there are pending holds or we're already ready - if ( wait === true ? 
--jQuery.readyWait : jQuery.isReady ) { - return; - } - - // Remember that the DOM is ready - jQuery.isReady = true; - - // If a normal DOM Ready event fired, decrement, and wait if need be - if ( wait !== true && --jQuery.readyWait > 0 ) { - return; - } - - // If there are functions bound, to execute - readyList.resolveWith( document, [ jQuery ] ); - } -} ); - -jQuery.ready.then = readyList.then; - -// The ready event handler and self cleanup method -function completed() { - document.removeEventListener( "DOMContentLoaded", completed ); - window.removeEventListener( "load", completed ); - jQuery.ready(); -} - -// Catch cases where $(document).ready() is called -// after the browser event has already occurred. -// Support: IE <=9 - 10 only -// Older IE sometimes signals "interactive" too soon -if ( document.readyState === "complete" || - ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { - - // Handle it asynchronously to allow scripts the opportunity to delay ready - window.setTimeout( jQuery.ready ); - -} else { - - // Use the handy event callback - document.addEventListener( "DOMContentLoaded", completed ); - - // A fallback to window.onload, that will always work - window.addEventListener( "load", completed ); -} - - - - -// Multifunctional method to get and set values of a collection -// The value/s can optionally be executed if it's a function -var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { - var i = 0, - len = elems.length, - bulk = key == null; - - // Sets many values - if ( jQuery.type( key ) === "object" ) { - chainable = true; - for ( i in key ) { - access( elems, fn, i, key[ i ], true, emptyGet, raw ); - } - - // Sets one value - } else if ( value !== undefined ) { - chainable = true; - - if ( !jQuery.isFunction( value ) ) { - raw = true; - } - - if ( bulk ) { - - // Bulk operations run against the entire set - if ( raw ) { - fn.call( elems, value ); - fn = null; - - // ...except when executing 
function values - } else { - bulk = fn; - fn = function( elem, key, value ) { - return bulk.call( jQuery( elem ), value ); - }; - } - } - - if ( fn ) { - for ( ; i < len; i++ ) { - fn( - elems[ i ], key, raw ? - value : - value.call( elems[ i ], i, fn( elems[ i ], key ) ) - ); - } - } - } - - if ( chainable ) { - return elems; - } - - // Gets - if ( bulk ) { - return fn.call( elems ); - } - - return len ? fn( elems[ 0 ], key ) : emptyGet; -}; -var acceptData = function( owner ) { - - // Accepts only: - // - Node - // - Node.ELEMENT_NODE - // - Node.DOCUMENT_NODE - // - Object - // - Any - return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); -}; - - - - -function Data() { - this.expando = jQuery.expando + Data.uid++; -} - -Data.uid = 1; - -Data.prototype = { - - cache: function( owner ) { - - // Check if the owner object already has a cache - var value = owner[ this.expando ]; - - // If not, create one - if ( !value ) { - value = {}; - - // We can accept data for non-element nodes in modern browsers, - // but we should not, see #8335. - // Always return an empty object. 
- if ( acceptData( owner ) ) { - - // If it is a node unlikely to be stringify-ed or looped over - // use plain assignment - if ( owner.nodeType ) { - owner[ this.expando ] = value; - - // Otherwise secure it in a non-enumerable property - // configurable must be true to allow the property to be - // deleted when data is removed - } else { - Object.defineProperty( owner, this.expando, { - value: value, - configurable: true - } ); - } - } - } - - return value; - }, - set: function( owner, data, value ) { - var prop, - cache = this.cache( owner ); - - // Handle: [ owner, key, value ] args - // Always use camelCase key (gh-2257) - if ( typeof data === "string" ) { - cache[ jQuery.camelCase( data ) ] = value; - - // Handle: [ owner, { properties } ] args - } else { - - // Copy the properties one-by-one to the cache object - for ( prop in data ) { - cache[ jQuery.camelCase( prop ) ] = data[ prop ]; - } - } - return cache; - }, - get: function( owner, key ) { - return key === undefined ? - this.cache( owner ) : - - // Always use camelCase key (gh-2257) - owner[ this.expando ] && owner[ this.expando ][ jQuery.camelCase( key ) ]; - }, - access: function( owner, key, value ) { - - // In cases where either: - // - // 1. No key was specified - // 2. A string key was specified, but no value provided - // - // Take the "read" path and allow the get method to determine - // which value to return, respectively either: - // - // 1. The entire cache object - // 2. The data stored at the key - // - if ( key === undefined || - ( ( key && typeof key === "string" ) && value === undefined ) ) { - - return this.get( owner, key ); - } - - // When the key is not a string, or both a key and value - // are specified, set or extend (existing objects) with either: - // - // 1. An object of properties - // 2. 
A key and value - // - this.set( owner, key, value ); - - // Since the "set" path can have two possible entry points - // return the expected data based on which path was taken[*] - return value !== undefined ? value : key; - }, - remove: function( owner, key ) { - var i, - cache = owner[ this.expando ]; - - if ( cache === undefined ) { - return; - } - - if ( key !== undefined ) { - - // Support array or space separated string of keys - if ( Array.isArray( key ) ) { - - // If key is an array of keys... - // We always set camelCase keys, so remove that. - key = key.map( jQuery.camelCase ); - } else { - key = jQuery.camelCase( key ); - - // If a key with the spaces exists, use it. - // Otherwise, create an array by matching non-whitespace - key = key in cache ? - [ key ] : - ( key.match( rnothtmlwhite ) || [] ); - } - - i = key.length; - - while ( i-- ) { - delete cache[ key[ i ] ]; - } - } - - // Remove the expando if there's no more data - if ( key === undefined || jQuery.isEmptyObject( cache ) ) { - - // Support: Chrome <=35 - 45 - // Webkit & Blink performance suffers when deleting properties - // from DOM nodes, so set to undefined instead - // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) - if ( owner.nodeType ) { - owner[ this.expando ] = undefined; - } else { - delete owner[ this.expando ]; - } - } - }, - hasData: function( owner ) { - var cache = owner[ this.expando ]; - return cache !== undefined && !jQuery.isEmptyObject( cache ); - } -}; -var dataPriv = new Data(); - -var dataUser = new Data(); - - - -// Implementation Summary -// -// 1. Enforce API surface and semantic compatibility with 1.9.x branch -// 2. Improve the module's maintainability by reducing the storage -// paths to a single mechanism. -// 3. Use the same single mechanism to support "private" and "user" data. -// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) -// 5. Avoid exposing implementation details on user objects (eg. 
expando properties) -// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 - -var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, - rmultiDash = /[A-Z]/g; - -function getData( data ) { - if ( data === "true" ) { - return true; - } - - if ( data === "false" ) { - return false; - } - - if ( data === "null" ) { - return null; - } - - // Only convert to a number if it doesn't change the string - if ( data === +data + "" ) { - return +data; - } - - if ( rbrace.test( data ) ) { - return JSON.parse( data ); - } - - return data; -} - -function dataAttr( elem, key, data ) { - var name; - - // If nothing was found internally, try to fetch any - // data from the HTML5 data-* attribute - if ( data === undefined && elem.nodeType === 1 ) { - name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); - data = elem.getAttribute( name ); - - if ( typeof data === "string" ) { - try { - data = getData( data ); - } catch ( e ) {} - - // Make sure we set the data so it isn't changed later - dataUser.set( elem, key, data ); - } else { - data = undefined; - } - } - return data; -} - -jQuery.extend( { - hasData: function( elem ) { - return dataUser.hasData( elem ) || dataPriv.hasData( elem ); - }, - - data: function( elem, name, data ) { - return dataUser.access( elem, name, data ); - }, - - removeData: function( elem, name ) { - dataUser.remove( elem, name ); - }, - - // TODO: Now that all calls to _data and _removeData have been replaced - // with direct calls to dataPriv methods, these can be deprecated. 
- _data: function( elem, name, data ) { - return dataPriv.access( elem, name, data ); - }, - - _removeData: function( elem, name ) { - dataPriv.remove( elem, name ); - } -} ); - -jQuery.fn.extend( { - data: function( key, value ) { - var i, name, data, - elem = this[ 0 ], - attrs = elem && elem.attributes; - - // Gets all values - if ( key === undefined ) { - if ( this.length ) { - data = dataUser.get( elem ); - - if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { - i = attrs.length; - while ( i-- ) { - - // Support: IE 11 only - // The attrs elements can be null (#14894) - if ( attrs[ i ] ) { - name = attrs[ i ].name; - if ( name.indexOf( "data-" ) === 0 ) { - name = jQuery.camelCase( name.slice( 5 ) ); - dataAttr( elem, name, data[ name ] ); - } - } - } - dataPriv.set( elem, "hasDataAttrs", true ); - } - } - - return data; - } - - // Sets multiple values - if ( typeof key === "object" ) { - return this.each( function() { - dataUser.set( this, key ); - } ); - } - - return access( this, function( value ) { - var data; - - // The calling jQuery object (element matches) is not empty - // (and therefore has an element appears at this[ 0 ]) and the - // `value` parameter was not undefined. An empty jQuery object - // will result in `undefined` for elem = this[ 0 ] which will - // throw an exception if an attempt to read a data cache is made. - if ( elem && value === undefined ) { - - // Attempt to get data from the cache - // The key will always be camelCased in Data - data = dataUser.get( elem, key ); - if ( data !== undefined ) { - return data; - } - - // Attempt to "discover" the data in - // HTML5 custom data-* attrs - data = dataAttr( elem, key ); - if ( data !== undefined ) { - return data; - } - - // We tried really hard, but the data doesn't exist. - return; - } - - // Set the data... 
- this.each( function() { - - // We always store the camelCased key - dataUser.set( this, key, value ); - } ); - }, null, value, arguments.length > 1, null, true ); - }, - - removeData: function( key ) { - return this.each( function() { - dataUser.remove( this, key ); - } ); - } -} ); - - -jQuery.extend( { - queue: function( elem, type, data ) { - var queue; - - if ( elem ) { - type = ( type || "fx" ) + "queue"; - queue = dataPriv.get( elem, type ); - - // Speed up dequeue by getting out quickly if this is just a lookup - if ( data ) { - if ( !queue || Array.isArray( data ) ) { - queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); - } else { - queue.push( data ); - } - } - return queue || []; - } - }, - - dequeue: function( elem, type ) { - type = type || "fx"; - - var queue = jQuery.queue( elem, type ), - startLength = queue.length, - fn = queue.shift(), - hooks = jQuery._queueHooks( elem, type ), - next = function() { - jQuery.dequeue( elem, type ); - }; - - // If the fx queue is dequeued, always remove the progress sentinel - if ( fn === "inprogress" ) { - fn = queue.shift(); - startLength--; - } - - if ( fn ) { - - // Add a progress sentinel to prevent the fx queue from being - // automatically dequeued - if ( type === "fx" ) { - queue.unshift( "inprogress" ); - } - - // Clear up the last queue stop function - delete hooks.stop; - fn.call( elem, next, hooks ); - } - - if ( !startLength && hooks ) { - hooks.empty.fire(); - } - }, - - // Not public - generate a queueHooks object, or return the current one - _queueHooks: function( elem, type ) { - var key = type + "queueHooks"; - return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { - empty: jQuery.Callbacks( "once memory" ).add( function() { - dataPriv.remove( elem, [ type + "queue", key ] ); - } ) - } ); - } -} ); - -jQuery.fn.extend( { - queue: function( type, data ) { - var setter = 2; - - if ( typeof type !== "string" ) { - data = type; - type = "fx"; - setter--; - } - - if ( 
arguments.length < setter ) { - return jQuery.queue( this[ 0 ], type ); - } - - return data === undefined ? - this : - this.each( function() { - var queue = jQuery.queue( this, type, data ); - - // Ensure a hooks for this queue - jQuery._queueHooks( this, type ); - - if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { - jQuery.dequeue( this, type ); - } - } ); - }, - dequeue: function( type ) { - return this.each( function() { - jQuery.dequeue( this, type ); - } ); - }, - clearQueue: function( type ) { - return this.queue( type || "fx", [] ); - }, - - // Get a promise resolved when queues of a certain type - // are emptied (fx is the type by default) - promise: function( type, obj ) { - var tmp, - count = 1, - defer = jQuery.Deferred(), - elements = this, - i = this.length, - resolve = function() { - if ( !( --count ) ) { - defer.resolveWith( elements, [ elements ] ); - } - }; - - if ( typeof type !== "string" ) { - obj = type; - type = undefined; - } - type = type || "fx"; - - while ( i-- ) { - tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); - if ( tmp && tmp.empty ) { - count++; - tmp.empty.add( resolve ); - } - } - resolve(); - return defer.promise( obj ); - } -} ); -var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; - -var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); - - -var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; - -var isHiddenWithinTree = function( elem, el ) { - - // isHiddenWithinTree might be called from jQuery#filter function; - // in that case, element will be second argument - elem = el || elem; - - // Inline style trumps all - return elem.style.display === "none" || - elem.style.display === "" && - - // Otherwise, check computed style - // Support: Firefox <=43 - 45 - // Disconnected elements can have computed display: none, so first confirm that elem is - // in the document. 
- jQuery.contains( elem.ownerDocument, elem ) && - - jQuery.css( elem, "display" ) === "none"; - }; - -var swap = function( elem, options, callback, args ) { - var ret, name, - old = {}; - - // Remember the old values, and insert the new ones - for ( name in options ) { - old[ name ] = elem.style[ name ]; - elem.style[ name ] = options[ name ]; - } - - ret = callback.apply( elem, args || [] ); - - // Revert the old values - for ( name in options ) { - elem.style[ name ] = old[ name ]; - } - - return ret; -}; - - - - -function adjustCSS( elem, prop, valueParts, tween ) { - var adjusted, - scale = 1, - maxIterations = 20, - currentValue = tween ? - function() { - return tween.cur(); - } : - function() { - return jQuery.css( elem, prop, "" ); - }, - initial = currentValue(), - unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? "" : "px" ), - - // Starting value computation is required for potential unit mismatches - initialInUnit = ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && - rcssNum.exec( jQuery.css( elem, prop ) ); - - if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { - - // Trust units reported by jQuery.css - unit = unit || initialInUnit[ 3 ]; - - // Make sure we update the tween properties later on - valueParts = valueParts || []; - - // Iteratively approximate from a nonzero starting point - initialInUnit = +initial || 1; - - do { - - // If previous iteration zeroed out, double until we get *something*. - // Use string for doubling so we don't accidentally see scale as unchanged below - scale = scale || ".5"; - - // Adjust and apply - initialInUnit = initialInUnit / scale; - jQuery.style( elem, prop, initialInUnit + unit ); - - // Update scale, tolerating zero or NaN from tween.cur() - // Break the loop if scale is unchanged or perfect, or if we've just had enough. 
- } while ( - scale !== ( scale = currentValue() / initial ) && scale !== 1 && --maxIterations - ); - } - - if ( valueParts ) { - initialInUnit = +initialInUnit || +initial || 0; - - // Apply relative offset (+=/-=) if specified - adjusted = valueParts[ 1 ] ? - initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : - +valueParts[ 2 ]; - if ( tween ) { - tween.unit = unit; - tween.start = initialInUnit; - tween.end = adjusted; - } - } - return adjusted; -} - - -var defaultDisplayMap = {}; - -function getDefaultDisplay( elem ) { - var temp, - doc = elem.ownerDocument, - nodeName = elem.nodeName, - display = defaultDisplayMap[ nodeName ]; - - if ( display ) { - return display; - } - - temp = doc.body.appendChild( doc.createElement( nodeName ) ); - display = jQuery.css( temp, "display" ); - - temp.parentNode.removeChild( temp ); - - if ( display === "none" ) { - display = "block"; - } - defaultDisplayMap[ nodeName ] = display; - - return display; -} - -function showHide( elements, show ) { - var display, elem, - values = [], - index = 0, - length = elements.length; - - // Determine new display value for elements that need to change - for ( ; index < length; index++ ) { - elem = elements[ index ]; - if ( !elem.style ) { - continue; - } - - display = elem.style.display; - if ( show ) { - - // Since we force visibility upon cascade-hidden elements, an immediate (and slow) - // check is required in this first loop unless we have a nonempty display value (either - // inline or about-to-be-restored) - if ( display === "none" ) { - values[ index ] = dataPriv.get( elem, "display" ) || null; - if ( !values[ index ] ) { - elem.style.display = ""; - } - } - if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) { - values[ index ] = getDefaultDisplay( elem ); - } - } else { - if ( display !== "none" ) { - values[ index ] = "none"; - - // Remember what we're overwriting - dataPriv.set( elem, "display", display ); - } - } - } - - // Set the display of the elements in a 
second loop to avoid constant reflow - for ( index = 0; index < length; index++ ) { - if ( values[ index ] != null ) { - elements[ index ].style.display = values[ index ]; - } - } - - return elements; -} - -jQuery.fn.extend( { - show: function() { - return showHide( this, true ); - }, - hide: function() { - return showHide( this ); - }, - toggle: function( state ) { - if ( typeof state === "boolean" ) { - return state ? this.show() : this.hide(); - } - - return this.each( function() { - if ( isHiddenWithinTree( this ) ) { - jQuery( this ).show(); - } else { - jQuery( this ).hide(); - } - } ); - } -} ); -var rcheckableType = ( /^(?:checkbox|radio)$/i ); - -var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]+)/i ); - -var rscriptType = ( /^$|\/(?:java|ecma)script/i ); - - - -// We have to close these tags to support XHTML (#13200) -var wrapMap = { - - // Support: IE <=9 only - option: [ 1, "" ], - - // XHTML parsers do not magically insert elements in the - // same way that tag soup parsers do. So we cannot shorten - // this by omitting or other required elements. - thead: [ 1, "", "
" ], - col: [ 2, "", "
" ], - tr: [ 2, "", "
" ], - td: [ 3, "", "
" ], - - _default: [ 0, "", "" ] -}; - -// Support: IE <=9 only -wrapMap.optgroup = wrapMap.option; - -wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; -wrapMap.th = wrapMap.td; - - -function getAll( context, tag ) { - - // Support: IE <=9 - 11 only - // Use typeof to avoid zero-argument method invocation on host objects (#15151) - var ret; - - if ( typeof context.getElementsByTagName !== "undefined" ) { - ret = context.getElementsByTagName( tag || "*" ); - - } else if ( typeof context.querySelectorAll !== "undefined" ) { - ret = context.querySelectorAll( tag || "*" ); - - } else { - ret = []; - } - - if ( tag === undefined || tag && nodeName( context, tag ) ) { - return jQuery.merge( [ context ], ret ); - } - - return ret; -} - - -// Mark scripts as having already been evaluated -function setGlobalEval( elems, refElements ) { - var i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - dataPriv.set( - elems[ i ], - "globalEval", - !refElements || dataPriv.get( refElements[ i ], "globalEval" ) - ); - } -} - - -var rhtml = /<|&#?\w+;/; - -function buildFragment( elems, context, scripts, selection, ignored ) { - var elem, tmp, tag, wrap, contains, j, - fragment = context.createDocumentFragment(), - nodes = [], - i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - elem = elems[ i ]; - - if ( elem || elem === 0 ) { - - // Add nodes directly - if ( jQuery.type( elem ) === "object" ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, elem.nodeType ? 
[ elem ] : elem ); - - // Convert non-html into a text node - } else if ( !rhtml.test( elem ) ) { - nodes.push( context.createTextNode( elem ) ); - - // Convert html into DOM nodes - } else { - tmp = tmp || fragment.appendChild( context.createElement( "div" ) ); - - // Deserialize a standard representation - tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase(); - wrap = wrapMap[ tag ] || wrapMap._default; - tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ]; - - // Descend through wrappers to the right content - j = wrap[ 0 ]; - while ( j-- ) { - tmp = tmp.lastChild; - } - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, tmp.childNodes ); - - // Remember the top-level container - tmp = fragment.firstChild; - - // Ensure the created nodes are orphaned (#12392) - tmp.textContent = ""; - } - } - } - - // Remove wrapper from fragment - fragment.textContent = ""; - - i = 0; - while ( ( elem = nodes[ i++ ] ) ) { - - // Skip elements already in the context collection (trac-4087) - if ( selection && jQuery.inArray( elem, selection ) > -1 ) { - if ( ignored ) { - ignored.push( elem ); - } - continue; - } - - contains = jQuery.contains( elem.ownerDocument, elem ); - - // Append to fragment - tmp = getAll( fragment.appendChild( elem ), "script" ); - - // Preserve script evaluation history - if ( contains ) { - setGlobalEval( tmp ); - } - - // Capture executables - if ( scripts ) { - j = 0; - while ( ( elem = tmp[ j++ ] ) ) { - if ( rscriptType.test( elem.type || "" ) ) { - scripts.push( elem ); - } - } - } - } - - return fragment; -} - - -( function() { - var fragment = document.createDocumentFragment(), - div = fragment.appendChild( document.createElement( "div" ) ), - input = document.createElement( "input" ); - - // Support: Android 4.0 - 4.3 only - // Check state lost if the name is set (#11217) - // Support: Windows Web Apps (WWA) - // `name` and `type` must use 
.setAttribute for WWA (#14901) - input.setAttribute( "type", "radio" ); - input.setAttribute( "checked", "checked" ); - input.setAttribute( "name", "t" ); - - div.appendChild( input ); - - // Support: Android <=4.1 only - // Older WebKit doesn't clone checked state correctly in fragments - support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked; - - // Support: IE <=11 only - // Make sure textarea (and checkbox) defaultValue is properly cloned - div.innerHTML = ""; - support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue; -} )(); -var documentElement = document.documentElement; - - - -var - rkeyEvent = /^key/, - rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/, - rtypenamespace = /^([^.]*)(?:\.(.+)|)/; - -function returnTrue() { - return true; -} - -function returnFalse() { - return false; -} - -// Support: IE <=9 only -// See #13393 for more info -function safeActiveElement() { - try { - return document.activeElement; - } catch ( err ) { } -} - -function on( elem, types, selector, data, fn, one ) { - var origFn, type; - - // Types can be a map of types/handlers - if ( typeof types === "object" ) { - - // ( types-Object, selector, data ) - if ( typeof selector !== "string" ) { - - // ( types-Object, data ) - data = data || selector; - selector = undefined; - } - for ( type in types ) { - on( elem, type, selector, data, types[ type ], one ); - } - return elem; - } - - if ( data == null && fn == null ) { - - // ( types, fn ) - fn = selector; - data = selector = undefined; - } else if ( fn == null ) { - if ( typeof selector === "string" ) { - - // ( types, selector, fn ) - fn = data; - data = undefined; - } else { - - // ( types, data, fn ) - fn = data; - data = selector; - selector = undefined; - } - } - if ( fn === false ) { - fn = returnFalse; - } else if ( !fn ) { - return elem; - } - - if ( one === 1 ) { - origFn = fn; - fn = function( event ) { - - // Can use an empty set, since event contains the info - 
jQuery().off( event ); - return origFn.apply( this, arguments ); - }; - - // Use same guid so caller can remove using origFn - fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); - } - return elem.each( function() { - jQuery.event.add( this, types, fn, data, selector ); - } ); -} - -/* - * Helper functions for managing events -- not part of the public interface. - * Props to Dean Edwards' addEvent library for many of the ideas. - */ -jQuery.event = { - - global: {}, - - add: function( elem, types, handler, data, selector ) { - - var handleObjIn, eventHandle, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.get( elem ); - - // Don't attach events to noData or text/comment nodes (but allow plain objects) - if ( !elemData ) { - return; - } - - // Caller can pass in an object of custom data in lieu of the handler - if ( handler.handler ) { - handleObjIn = handler; - handler = handleObjIn.handler; - selector = handleObjIn.selector; - } - - // Ensure that invalid selectors throw exceptions at attach time - // Evaluate against documentElement in case elem is a non-element node (e.g., document) - if ( selector ) { - jQuery.find.matchesSelector( documentElement, selector ); - } - - // Make sure that the handler has a unique ID, used to find/remove it later - if ( !handler.guid ) { - handler.guid = jQuery.guid++; - } - - // Init the element's event structure and main handler, if this is the first - if ( !( events = elemData.events ) ) { - events = elemData.events = {}; - } - if ( !( eventHandle = elemData.handle ) ) { - eventHandle = elemData.handle = function( e ) { - - // Discard the second event of a jQuery.event.trigger() and - // when an event is called after a page has unloaded - return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? 
- jQuery.event.dispatch.apply( elem, arguments ) : undefined; - }; - } - - // Handle multiple events separated by a space - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); - - // There *must* be a type, no attaching namespace-only handlers - if ( !type ) { - continue; - } - - // If event changes its type, use the special event handlers for the changed type - special = jQuery.event.special[ type ] || {}; - - // If selector defined, determine special event api type, otherwise given type - type = ( selector ? special.delegateType : special.bindType ) || type; - - // Update special based on newly reset type - special = jQuery.event.special[ type ] || {}; - - // handleObj is passed to all event handlers - handleObj = jQuery.extend( { - type: type, - origType: origType, - data: data, - handler: handler, - guid: handler.guid, - selector: selector, - needsContext: selector && jQuery.expr.match.needsContext.test( selector ), - namespace: namespaces.join( "." 
) - }, handleObjIn ); - - // Init the event handler queue if we're the first - if ( !( handlers = events[ type ] ) ) { - handlers = events[ type ] = []; - handlers.delegateCount = 0; - - // Only use addEventListener if the special events handler returns false - if ( !special.setup || - special.setup.call( elem, data, namespaces, eventHandle ) === false ) { - - if ( elem.addEventListener ) { - elem.addEventListener( type, eventHandle ); - } - } - } - - if ( special.add ) { - special.add.call( elem, handleObj ); - - if ( !handleObj.handler.guid ) { - handleObj.handler.guid = handler.guid; - } - } - - // Add to the element's handler list, delegates in front - if ( selector ) { - handlers.splice( handlers.delegateCount++, 0, handleObj ); - } else { - handlers.push( handleObj ); - } - - // Keep track of which events have ever been used, for event optimization - jQuery.event.global[ type ] = true; - } - - }, - - // Detach an event or set of events from an element - remove: function( elem, types, handler, selector, mappedTypes ) { - - var j, origCount, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); - - if ( !elemData || !( events = elemData.events ) ) { - return; - } - - // Once for each type.namespace in types; type may be omitted - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); - - // Unbind all events (on this namespace, if provided) for the element - if ( !type ) { - for ( type in events ) { - jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); - } - continue; - } - - special = jQuery.event.special[ type ] || {}; - type = ( selector ? 
special.delegateType : special.bindType ) || type; - handlers = events[ type ] || []; - tmp = tmp[ 2 ] && - new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); - - // Remove matching events - origCount = j = handlers.length; - while ( j-- ) { - handleObj = handlers[ j ]; - - if ( ( mappedTypes || origType === handleObj.origType ) && - ( !handler || handler.guid === handleObj.guid ) && - ( !tmp || tmp.test( handleObj.namespace ) ) && - ( !selector || selector === handleObj.selector || - selector === "**" && handleObj.selector ) ) { - handlers.splice( j, 1 ); - - if ( handleObj.selector ) { - handlers.delegateCount--; - } - if ( special.remove ) { - special.remove.call( elem, handleObj ); - } - } - } - - // Remove generic event handler if we removed something and no more handlers exist - // (avoids potential for endless recursion during removal of special event handlers) - if ( origCount && !handlers.length ) { - if ( !special.teardown || - special.teardown.call( elem, namespaces, elemData.handle ) === false ) { - - jQuery.removeEvent( elem, type, elemData.handle ); - } - - delete events[ type ]; - } - } - - // Remove data and the expando if it's no longer used - if ( jQuery.isEmptyObject( events ) ) { - dataPriv.remove( elem, "handle events" ); - } - }, - - dispatch: function( nativeEvent ) { - - // Make a writable jQuery.Event from the native event object - var event = jQuery.event.fix( nativeEvent ); - - var i, j, ret, matched, handleObj, handlerQueue, - args = new Array( arguments.length ), - handlers = ( dataPriv.get( this, "events" ) || {} )[ event.type ] || [], - special = jQuery.event.special[ event.type ] || {}; - - // Use the fix-ed jQuery.Event rather than the (read-only) native event - args[ 0 ] = event; - - for ( i = 1; i < arguments.length; i++ ) { - args[ i ] = arguments[ i ]; - } - - event.delegateTarget = this; - - // Call the preDispatch hook for the mapped type, and let it bail if desired - if ( special.preDispatch && 
special.preDispatch.call( this, event ) === false ) { - return; - } - - // Determine handlers - handlerQueue = jQuery.event.handlers.call( this, event, handlers ); - - // Run delegates first; they may want to stop propagation beneath us - i = 0; - while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { - event.currentTarget = matched.elem; - - j = 0; - while ( ( handleObj = matched.handlers[ j++ ] ) && - !event.isImmediatePropagationStopped() ) { - - // Triggered event must either 1) have no namespace, or 2) have namespace(s) - // a subset or equal to those in the bound event (both can have no namespace). - if ( !event.rnamespace || event.rnamespace.test( handleObj.namespace ) ) { - - event.handleObj = handleObj; - event.data = handleObj.data; - - ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || - handleObj.handler ).apply( matched.elem, args ); - - if ( ret !== undefined ) { - if ( ( event.result = ret ) === false ) { - event.preventDefault(); - event.stopPropagation(); - } - } - } - } - } - - // Call the postDispatch hook for the mapped type - if ( special.postDispatch ) { - special.postDispatch.call( this, event ); - } - - return event.result; - }, - - handlers: function( event, handlers ) { - var i, handleObj, sel, matchedHandlers, matchedSelectors, - handlerQueue = [], - delegateCount = handlers.delegateCount, - cur = event.target; - - // Find delegate handlers - if ( delegateCount && - - // Support: IE <=9 - // Black-hole SVG instance trees (trac-13180) - cur.nodeType && - - // Support: Firefox <=42 - // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) - // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click - // Support: IE 11 only - // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) - !( event.type === "click" && event.button >= 1 ) ) { - - for ( ; cur !== this; cur = cur.parentNode || this ) { - - // Don't check non-elements (#13208) - 
// Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) - if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { - matchedHandlers = []; - matchedSelectors = {}; - for ( i = 0; i < delegateCount; i++ ) { - handleObj = handlers[ i ]; - - // Don't conflict with Object.prototype properties (#13203) - sel = handleObj.selector + " "; - - if ( matchedSelectors[ sel ] === undefined ) { - matchedSelectors[ sel ] = handleObj.needsContext ? - jQuery( sel, this ).index( cur ) > -1 : - jQuery.find( sel, this, null, [ cur ] ).length; - } - if ( matchedSelectors[ sel ] ) { - matchedHandlers.push( handleObj ); - } - } - if ( matchedHandlers.length ) { - handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); - } - } - } - } - - // Add the remaining (directly-bound) handlers - cur = this; - if ( delegateCount < handlers.length ) { - handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); - } - - return handlerQueue; - }, - - addProp: function( name, hook ) { - Object.defineProperty( jQuery.Event.prototype, name, { - enumerable: true, - configurable: true, - - get: jQuery.isFunction( hook ) ? - function() { - if ( this.originalEvent ) { - return hook( this.originalEvent ); - } - } : - function() { - if ( this.originalEvent ) { - return this.originalEvent[ name ]; - } - }, - - set: function( value ) { - Object.defineProperty( this, name, { - enumerable: true, - configurable: true, - writable: true, - value: value - } ); - } - } ); - }, - - fix: function( originalEvent ) { - return originalEvent[ jQuery.expando ] ? 
- originalEvent : - new jQuery.Event( originalEvent ); - }, - - special: { - load: { - - // Prevent triggered image.load events from bubbling to window.load - noBubble: true - }, - focus: { - - // Fire native event if possible so blur/focus sequence is correct - trigger: function() { - if ( this !== safeActiveElement() && this.focus ) { - this.focus(); - return false; - } - }, - delegateType: "focusin" - }, - blur: { - trigger: function() { - if ( this === safeActiveElement() && this.blur ) { - this.blur(); - return false; - } - }, - delegateType: "focusout" - }, - click: { - - // For checkbox, fire native event so checked state will be right - trigger: function() { - if ( this.type === "checkbox" && this.click && nodeName( this, "input" ) ) { - this.click(); - return false; - } - }, - - // For cross-browser consistency, don't fire native .click() on links - _default: function( event ) { - return nodeName( event.target, "a" ); - } - }, - - beforeunload: { - postDispatch: function( event ) { - - // Support: Firefox 20+ - // Firefox doesn't alert if the returnValue field is not set. - if ( event.result !== undefined && event.originalEvent ) { - event.originalEvent.returnValue = event.result; - } - } - } - } -}; - -jQuery.removeEvent = function( elem, type, handle ) { - - // This "if" is needed for plain objects - if ( elem.removeEventListener ) { - elem.removeEventListener( type, handle ); - } -}; - -jQuery.Event = function( src, props ) { - - // Allow instantiation without the 'new' keyword - if ( !( this instanceof jQuery.Event ) ) { - return new jQuery.Event( src, props ); - } - - // Event object - if ( src && src.type ) { - this.originalEvent = src; - this.type = src.type; - - // Events bubbling up the document may have been marked as prevented - // by a handler lower down the tree; reflect the correct value. - this.isDefaultPrevented = src.defaultPrevented || - src.defaultPrevented === undefined && - - // Support: Android <=2.3 only - src.returnValue === false ? 
- returnTrue : - returnFalse; - - // Create target properties - // Support: Safari <=6 - 7 only - // Target should not be a text node (#504, #13143) - this.target = ( src.target && src.target.nodeType === 3 ) ? - src.target.parentNode : - src.target; - - this.currentTarget = src.currentTarget; - this.relatedTarget = src.relatedTarget; - - // Event type - } else { - this.type = src; - } - - // Put explicitly provided properties onto the event object - if ( props ) { - jQuery.extend( this, props ); - } - - // Create a timestamp if incoming event doesn't have one - this.timeStamp = src && src.timeStamp || jQuery.now(); - - // Mark it as fixed - this[ jQuery.expando ] = true; -}; - -// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding -// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html -jQuery.Event.prototype = { - constructor: jQuery.Event, - isDefaultPrevented: returnFalse, - isPropagationStopped: returnFalse, - isImmediatePropagationStopped: returnFalse, - isSimulated: false, - - preventDefault: function() { - var e = this.originalEvent; - - this.isDefaultPrevented = returnTrue; - - if ( e && !this.isSimulated ) { - e.preventDefault(); - } - }, - stopPropagation: function() { - var e = this.originalEvent; - - this.isPropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopPropagation(); - } - }, - stopImmediatePropagation: function() { - var e = this.originalEvent; - - this.isImmediatePropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopImmediatePropagation(); - } - - this.stopPropagation(); - } -}; - -// Includes all common event props including KeyEvent and MouseEvent specific props -jQuery.each( { - altKey: true, - bubbles: true, - cancelable: true, - changedTouches: true, - ctrlKey: true, - detail: true, - eventPhase: true, - metaKey: true, - pageX: true, - pageY: true, - shiftKey: true, - view: true, - "char": true, - charCode: true, - key: true, - 
keyCode: true, - button: true, - buttons: true, - clientX: true, - clientY: true, - offsetX: true, - offsetY: true, - pointerId: true, - pointerType: true, - screenX: true, - screenY: true, - targetTouches: true, - toElement: true, - touches: true, - - which: function( event ) { - var button = event.button; - - // Add which for key events - if ( event.which == null && rkeyEvent.test( event.type ) ) { - return event.charCode != null ? event.charCode : event.keyCode; - } - - // Add which for click: 1 === left; 2 === middle; 3 === right - if ( !event.which && button !== undefined && rmouseEvent.test( event.type ) ) { - if ( button & 1 ) { - return 1; - } - - if ( button & 2 ) { - return 3; - } - - if ( button & 4 ) { - return 2; - } - - return 0; - } - - return event.which; - } -}, jQuery.event.addProp ); - -// Create mouseenter/leave events using mouseover/out and event-time checks -// so that event delegation works in jQuery. -// Do the same for pointerenter/pointerleave and pointerover/pointerout -// -// Support: Safari 7 only -// Safari sends mouseenter too often; see: -// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 -// for the description of the bug (it existed in older Chrome versions as well). -jQuery.each( { - mouseenter: "mouseover", - mouseleave: "mouseout", - pointerenter: "pointerover", - pointerleave: "pointerout" -}, function( orig, fix ) { - jQuery.event.special[ orig ] = { - delegateType: fix, - bindType: fix, - - handle: function( event ) { - var ret, - target = this, - related = event.relatedTarget, - handleObj = event.handleObj; - - // For mouseenter/leave call the handler if related is outside the target. 
- // NB: No relatedTarget if the mouse left/entered the browser window - if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { - event.type = handleObj.origType; - ret = handleObj.handler.apply( this, arguments ); - event.type = fix; - } - return ret; - } - }; -} ); - -jQuery.fn.extend( { - - on: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn ); - }, - one: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn, 1 ); - }, - off: function( types, selector, fn ) { - var handleObj, type; - if ( types && types.preventDefault && types.handleObj ) { - - // ( event ) dispatched jQuery.Event - handleObj = types.handleObj; - jQuery( types.delegateTarget ).off( - handleObj.namespace ? - handleObj.origType + "." + handleObj.namespace : - handleObj.origType, - handleObj.selector, - handleObj.handler - ); - return this; - } - if ( typeof types === "object" ) { - - // ( types-object [, selector] ) - for ( type in types ) { - this.off( type, selector, types[ type ] ); - } - return this; - } - if ( selector === false || typeof selector === "function" ) { - - // ( types [, fn] ) - fn = selector; - selector = undefined; - } - if ( fn === false ) { - fn = returnFalse; - } - return this.each( function() { - jQuery.event.remove( this, types, fn, selector ); - } ); - } -} ); - - -var - - /* eslint-disable max-len */ - - // See https://github.com/eslint/eslint/issues/3229 - rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([a-z][^\/\0>\x20\t\r\n\f]*)[^>]*)\/>/gi, - - /* eslint-enable */ - - // Support: IE <=10 - 11, Edge 12 - 13 - // In IE/Edge using regex groups here causes severe slowdowns. - // See https://connect.microsoft.com/IE/feedback/details/1736512/ - rnoInnerhtml = /\s*$/g; - -// Prefer a tbody over its parent table for containing new rows -function manipulationTarget( elem, content ) { - if ( nodeName( elem, "table" ) && - nodeName( content.nodeType !== 11 ? 
content : content.firstChild, "tr" ) ) { - - return jQuery( ">tbody", elem )[ 0 ] || elem; - } - - return elem; -} - -// Replace/restore the type attribute of script elements for safe DOM manipulation -function disableScript( elem ) { - elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type; - return elem; -} -function restoreScript( elem ) { - var match = rscriptTypeMasked.exec( elem.type ); - - if ( match ) { - elem.type = match[ 1 ]; - } else { - elem.removeAttribute( "type" ); - } - - return elem; -} - -function cloneCopyEvent( src, dest ) { - var i, l, type, pdataOld, pdataCur, udataOld, udataCur, events; - - if ( dest.nodeType !== 1 ) { - return; - } - - // 1. Copy private data: events, handlers, etc. - if ( dataPriv.hasData( src ) ) { - pdataOld = dataPriv.access( src ); - pdataCur = dataPriv.set( dest, pdataOld ); - events = pdataOld.events; - - if ( events ) { - delete pdataCur.handle; - pdataCur.events = {}; - - for ( type in events ) { - for ( i = 0, l = events[ type ].length; i < l; i++ ) { - jQuery.event.add( dest, type, events[ type ][ i ] ); - } - } - } - } - - // 2. Copy user data - if ( dataUser.hasData( src ) ) { - udataOld = dataUser.access( src ); - udataCur = jQuery.extend( {}, udataOld ); - - dataUser.set( dest, udataCur ); - } -} - -// Fix IE bugs, see support tests -function fixInput( src, dest ) { - var nodeName = dest.nodeName.toLowerCase(); - - // Fails to persist the checked state of a cloned checkbox or radio button. 
- if ( nodeName === "input" && rcheckableType.test( src.type ) ) { - dest.checked = src.checked; - - // Fails to return the selected option to the default selected state when cloning options - } else if ( nodeName === "input" || nodeName === "textarea" ) { - dest.defaultValue = src.defaultValue; - } -} - -function domManip( collection, args, callback, ignored ) { - - // Flatten any nested arrays - args = concat.apply( [], args ); - - var fragment, first, scripts, hasScripts, node, doc, - i = 0, - l = collection.length, - iNoClone = l - 1, - value = args[ 0 ], - isFunction = jQuery.isFunction( value ); - - // We can't cloneNode fragments that contain checked, in WebKit - if ( isFunction || - ( l > 1 && typeof value === "string" && - !support.checkClone && rchecked.test( value ) ) ) { - return collection.each( function( index ) { - var self = collection.eq( index ); - if ( isFunction ) { - args[ 0 ] = value.call( this, index, self.html() ); - } - domManip( self, args, callback, ignored ); - } ); - } - - if ( l ) { - fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); - first = fragment.firstChild; - - if ( fragment.childNodes.length === 1 ) { - fragment = first; - } - - // Require either new content or an interest in ignored elements to invoke the callback - if ( first || ignored ) { - scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); - hasScripts = scripts.length; - - // Use the original fragment for the last item - // instead of the first because it can end up - // being emptied incorrectly in certain situations (#8070). 
- for ( ; i < l; i++ ) { - node = fragment; - - if ( i !== iNoClone ) { - node = jQuery.clone( node, true, true ); - - // Keep references to cloned scripts for later restoration - if ( hasScripts ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( scripts, getAll( node, "script" ) ); - } - } - - callback.call( collection[ i ], node, i ); - } - - if ( hasScripts ) { - doc = scripts[ scripts.length - 1 ].ownerDocument; - - // Reenable scripts - jQuery.map( scripts, restoreScript ); - - // Evaluate executable scripts on first document insertion - for ( i = 0; i < hasScripts; i++ ) { - node = scripts[ i ]; - if ( rscriptType.test( node.type || "" ) && - !dataPriv.access( node, "globalEval" ) && - jQuery.contains( doc, node ) ) { - - if ( node.src ) { - - // Optional AJAX dependency, but won't run scripts if not present - if ( jQuery._evalUrl ) { - jQuery._evalUrl( node.src ); - } - } else { - DOMEval( node.textContent.replace( rcleanScript, "" ), doc ); - } - } - } - } - } - } - - return collection; -} - -function remove( elem, selector, keepData ) { - var node, - nodes = selector ? 
jQuery.filter( selector, elem ) : elem, - i = 0; - - for ( ; ( node = nodes[ i ] ) != null; i++ ) { - if ( !keepData && node.nodeType === 1 ) { - jQuery.cleanData( getAll( node ) ); - } - - if ( node.parentNode ) { - if ( keepData && jQuery.contains( node.ownerDocument, node ) ) { - setGlobalEval( getAll( node, "script" ) ); - } - node.parentNode.removeChild( node ); - } - } - - return elem; -} - -jQuery.extend( { - htmlPrefilter: function( html ) { - return html.replace( rxhtmlTag, "<$1>" ); - }, - - clone: function( elem, dataAndEvents, deepDataAndEvents ) { - var i, l, srcElements, destElements, - clone = elem.cloneNode( true ), - inPage = jQuery.contains( elem.ownerDocument, elem ); - - // Fix IE cloning issues - if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && - !jQuery.isXMLDoc( elem ) ) { - - // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 - destElements = getAll( clone ); - srcElements = getAll( elem ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - fixInput( srcElements[ i ], destElements[ i ] ); - } - } - - // Copy the events from the original to the clone - if ( dataAndEvents ) { - if ( deepDataAndEvents ) { - srcElements = srcElements || getAll( elem ); - destElements = destElements || getAll( clone ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - cloneCopyEvent( srcElements[ i ], destElements[ i ] ); - } - } else { - cloneCopyEvent( elem, clone ); - } - } - - // Preserve script evaluation history - destElements = getAll( clone, "script" ); - if ( destElements.length > 0 ) { - setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); - } - - // Return the cloned set - return clone; - }, - - cleanData: function( elems ) { - var data, elem, type, - special = jQuery.event.special, - i = 0; - - for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { - if ( acceptData( elem ) ) { - if ( ( data = elem[ dataPriv.expando ] ) ) { - if ( data.events ) { - for ( 
type in data.events ) { - if ( special[ type ] ) { - jQuery.event.remove( elem, type ); - - // This is a shortcut to avoid jQuery.event.remove's overhead - } else { - jQuery.removeEvent( elem, type, data.handle ); - } - } - } - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove - elem[ dataPriv.expando ] = undefined; - } - if ( elem[ dataUser.expando ] ) { - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove - elem[ dataUser.expando ] = undefined; - } - } - } - } -} ); - -jQuery.fn.extend( { - detach: function( selector ) { - return remove( this, selector, true ); - }, - - remove: function( selector ) { - return remove( this, selector ); - }, - - text: function( value ) { - return access( this, function( value ) { - return value === undefined ? - jQuery.text( this ) : - this.empty().each( function() { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - this.textContent = value; - } - } ); - }, null, value, arguments.length ); - }, - - append: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.appendChild( elem ); - } - } ); - }, - - prepend: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.insertBefore( elem, target.firstChild ); - } - } ); - }, - - before: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this ); - } - } ); - }, - - after: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this.nextSibling ); - } - } ); - }, - - empty: function() { - var elem, - i = 0; - - for ( ; ( 
elem = this[ i ] ) != null; i++ ) { - if ( elem.nodeType === 1 ) { - - // Prevent memory leaks - jQuery.cleanData( getAll( elem, false ) ); - - // Remove any remaining nodes - elem.textContent = ""; - } - } - - return this; - }, - - clone: function( dataAndEvents, deepDataAndEvents ) { - dataAndEvents = dataAndEvents == null ? false : dataAndEvents; - deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; - - return this.map( function() { - return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); - } ); - }, - - html: function( value ) { - return access( this, function( value ) { - var elem = this[ 0 ] || {}, - i = 0, - l = this.length; - - if ( value === undefined && elem.nodeType === 1 ) { - return elem.innerHTML; - } - - // See if we can take a shortcut and just use innerHTML - if ( typeof value === "string" && !rnoInnerhtml.test( value ) && - !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { - - value = jQuery.htmlPrefilter( value ); - - try { - for ( ; i < l; i++ ) { - elem = this[ i ] || {}; - - // Remove element nodes and prevent memory leaks - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - elem.innerHTML = value; - } - } - - elem = 0; - - // If using innerHTML throws an exception, use the fallback method - } catch ( e ) {} - } - - if ( elem ) { - this.empty().append( value ); - } - }, null, value, arguments.length ); - }, - - replaceWith: function() { - var ignored = []; - - // Make the changes, replacing each non-ignored context element with the new content - return domManip( this, arguments, function( elem ) { - var parent = this.parentNode; - - if ( jQuery.inArray( this, ignored ) < 0 ) { - jQuery.cleanData( getAll( this ) ); - if ( parent ) { - parent.replaceChild( elem, this ); - } - } - - // Force callback invocation - }, ignored ); - } -} ); - -jQuery.each( { - appendTo: "append", - prependTo: "prepend", - insertBefore: "before", - insertAfter: "after", - 
replaceAll: "replaceWith" -}, function( name, original ) { - jQuery.fn[ name ] = function( selector ) { - var elems, - ret = [], - insert = jQuery( selector ), - last = insert.length - 1, - i = 0; - - for ( ; i <= last; i++ ) { - elems = i === last ? this : this.clone( true ); - jQuery( insert[ i ] )[ original ]( elems ); - - // Support: Android <=4.0 only, PhantomJS 1 only - // .get() because push.apply(_, arraylike) throws on ancient WebKit - push.apply( ret, elems.get() ); - } - - return this.pushStack( ret ); - }; -} ); -var rmargin = ( /^margin/ ); - -var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); - -var getStyles = function( elem ) { - - // Support: IE <=11 only, Firefox <=30 (#15098, #14150) - // IE throws on elements created in popups - // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" - var view = elem.ownerDocument.defaultView; - - if ( !view || !view.opener ) { - view = window; - } - - return view.getComputedStyle( elem ); - }; - - - -( function() { - - // Executing both pixelPosition & boxSizingReliable tests require only one layout - // so they're executed at the same time to save the second computation. 
- function computeStyleTests() { - - // This is a singleton, we need to execute it only once - if ( !div ) { - return; - } - - div.style.cssText = - "box-sizing:border-box;" + - "position:relative;display:block;" + - "margin:auto;border:1px;padding:1px;" + - "top:1%;width:50%"; - div.innerHTML = ""; - documentElement.appendChild( container ); - - var divStyle = window.getComputedStyle( div ); - pixelPositionVal = divStyle.top !== "1%"; - - // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 - reliableMarginLeftVal = divStyle.marginLeft === "2px"; - boxSizingReliableVal = divStyle.width === "4px"; - - // Support: Android 4.0 - 4.3 only - // Some styles come back with percentage values, even though they shouldn't - div.style.marginRight = "50%"; - pixelMarginRightVal = divStyle.marginRight === "4px"; - - documentElement.removeChild( container ); - - // Nullify the div so it wouldn't be stored in the memory and - // it will also be a sign that checks already performed - div = null; - } - - var pixelPositionVal, boxSizingReliableVal, pixelMarginRightVal, reliableMarginLeftVal, - container = document.createElement( "div" ), - div = document.createElement( "div" ); - - // Finish early in limited (non-browser) environments - if ( !div.style ) { - return; - } - - // Support: IE <=9 - 11 only - // Style of cloned element affects source element cloned (#8908) - div.style.backgroundClip = "content-box"; - div.cloneNode( true ).style.backgroundClip = ""; - support.clearCloneStyle = div.style.backgroundClip === "content-box"; - - container.style.cssText = "border:0;width:8px;height:0;top:0;left:-9999px;" + - "padding:0;margin-top:1px;position:absolute"; - container.appendChild( div ); - - jQuery.extend( support, { - pixelPosition: function() { - computeStyleTests(); - return pixelPositionVal; - }, - boxSizingReliable: function() { - computeStyleTests(); - return boxSizingReliableVal; - }, - pixelMarginRight: function() { - computeStyleTests(); - return pixelMarginRightVal; - 
}, - reliableMarginLeft: function() { - computeStyleTests(); - return reliableMarginLeftVal; - } - } ); -} )(); - - -function curCSS( elem, name, computed ) { - var width, minWidth, maxWidth, ret, - - // Support: Firefox 51+ - // Retrieving style before computed somehow - // fixes an issue with getting wrong values - // on detached elements - style = elem.style; - - computed = computed || getStyles( elem ); - - // getPropertyValue is needed for: - // .css('filter') (IE 9 only, #12537) - // .css('--customProperty) (#3144) - if ( computed ) { - ret = computed.getPropertyValue( name ) || computed[ name ]; - - if ( ret === "" && !jQuery.contains( elem.ownerDocument, elem ) ) { - ret = jQuery.style( elem, name ); - } - - // A tribute to the "awesome hack by Dean Edwards" - // Android Browser returns percentage for some values, - // but width seems to be reliably pixels. - // This is against the CSSOM draft spec: - // https://drafts.csswg.org/cssom/#resolved-values - if ( !support.pixelMarginRight() && rnumnonpx.test( ret ) && rmargin.test( name ) ) { - - // Remember the original values - width = style.width; - minWidth = style.minWidth; - maxWidth = style.maxWidth; - - // Put in the new values to get a computed value out - style.minWidth = style.maxWidth = style.width = ret; - ret = computed.width; - - // Revert the changed values - style.width = width; - style.minWidth = minWidth; - style.maxWidth = maxWidth; - } - } - - return ret !== undefined ? - - // Support: IE <=9 - 11 only - // IE returns zIndex value as an integer. - ret + "" : - ret; -} - - -function addGetHookIf( conditionFn, hookFn ) { - - // Define the hook, we'll check on the first run if it's really needed. - return { - get: function() { - if ( conditionFn() ) { - - // Hook not needed (or it's not possible to use it due - // to missing dependency), remove it. - delete this.get; - return; - } - - // Hook needed; redefine it so that the support test is not executed again. 
- return ( this.get = hookFn ).apply( this, arguments ); - } - }; -} - - -var - - // Swappable if display is none or starts with table - // except "table", "table-cell", or "table-caption" - // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display - rdisplayswap = /^(none|table(?!-c[ea]).+)/, - rcustomProp = /^--/, - cssShow = { position: "absolute", visibility: "hidden", display: "block" }, - cssNormalTransform = { - letterSpacing: "0", - fontWeight: "400" - }, - - cssPrefixes = [ "Webkit", "Moz", "ms" ], - emptyStyle = document.createElement( "div" ).style; - -// Return a css property mapped to a potentially vendor prefixed property -function vendorPropName( name ) { - - // Shortcut for names that are not vendor prefixed - if ( name in emptyStyle ) { - return name; - } - - // Check for vendor prefixed names - var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), - i = cssPrefixes.length; - - while ( i-- ) { - name = cssPrefixes[ i ] + capName; - if ( name in emptyStyle ) { - return name; - } - } -} - -// Return a property mapped along what jQuery.cssProps suggests or to -// a vendor prefixed property. -function finalPropName( name ) { - var ret = jQuery.cssProps[ name ]; - if ( !ret ) { - ret = jQuery.cssProps[ name ] = vendorPropName( name ) || name; - } - return ret; -} - -function setPositiveNumber( elem, value, subtract ) { - - // Any relative (+/-) values have already been - // normalized at this point - var matches = rcssNum.exec( value ); - return matches ? - - // Guard against undefined "subtract", e.g., when used as in cssHooks - Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : - value; -} - -function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) { - var i, - val = 0; - - // If we already have the right measurement, avoid augmentation - if ( extra === ( isBorderBox ? 
"border" : "content" ) ) { - i = 4; - - // Otherwise initialize for horizontal or vertical properties - } else { - i = name === "width" ? 1 : 0; - } - - for ( ; i < 4; i += 2 ) { - - // Both box models exclude margin, so add it if we want it - if ( extra === "margin" ) { - val += jQuery.css( elem, extra + cssExpand[ i ], true, styles ); - } - - if ( isBorderBox ) { - - // border-box includes padding, so remove it if we want content - if ( extra === "content" ) { - val -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - } - - // At this point, extra isn't border nor margin, so remove border - if ( extra !== "margin" ) { - val -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } else { - - // At this point, extra isn't content, so add padding - val += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - - // At this point, extra isn't content nor padding, so add border - if ( extra !== "padding" ) { - val += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } - } - - return val; -} - -function getWidthOrHeight( elem, name, extra ) { - - // Start with computed style - var valueIsBorderBox, - styles = getStyles( elem ), - val = curCSS( elem, name, styles ), - isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; - - // Computed unit is not pixels. Stop here and return. 
- if ( rnumnonpx.test( val ) ) { - return val; - } - - // Check for style in case a browser which returns unreliable values - // for getComputedStyle silently falls back to the reliable elem.style - valueIsBorderBox = isBorderBox && - ( support.boxSizingReliable() || val === elem.style[ name ] ); - - // Fall back to offsetWidth/Height when value is "auto" - // This happens for inline elements with no explicit setting (gh-3571) - if ( val === "auto" ) { - val = elem[ "offset" + name[ 0 ].toUpperCase() + name.slice( 1 ) ]; - } - - // Normalize "", auto, and prepare for extra - val = parseFloat( val ) || 0; - - // Use the active box-sizing model to add/subtract irrelevant styles - return ( val + - augmentWidthOrHeight( - elem, - name, - extra || ( isBorderBox ? "border" : "content" ), - valueIsBorderBox, - styles - ) - ) + "px"; -} - -jQuery.extend( { - - // Add in style property hooks for overriding the default - // behavior of getting and setting a style property - cssHooks: { - opacity: { - get: function( elem, computed ) { - if ( computed ) { - - // We should always get a number back from opacity - var ret = curCSS( elem, "opacity" ); - return ret === "" ? 
"1" : ret; - } - } - } - }, - - // Don't automatically add "px" to these possibly-unitless properties - cssNumber: { - "animationIterationCount": true, - "columnCount": true, - "fillOpacity": true, - "flexGrow": true, - "flexShrink": true, - "fontWeight": true, - "lineHeight": true, - "opacity": true, - "order": true, - "orphans": true, - "widows": true, - "zIndex": true, - "zoom": true - }, - - // Add in properties whose names you wish to fix before - // setting or getting the value - cssProps: { - "float": "cssFloat" - }, - - // Get and set the style property on a DOM Node - style: function( elem, name, value, extra ) { - - // Don't set styles on text and comment nodes - if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { - return; - } - - // Make sure that we're working with the right name - var ret, type, hooks, - origName = jQuery.camelCase( name ), - isCustomProp = rcustomProp.test( name ), - style = elem.style; - - // Make sure that we're working with the right name. We don't - // want to query the value if it is a CSS custom property - // since they are user-defined. - if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Gets hook for the prefixed version, then unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // Check if we're setting a value - if ( value !== undefined ) { - type = typeof value; - - // Convert "+=" or "-=" to relative numbers (#7345) - if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { - value = adjustCSS( elem, name, ret ); - - // Fixes bug #9237 - type = "number"; - } - - // Make sure that null and NaN values aren't set (#7116) - if ( value == null || value !== value ) { - return; - } - - // If a number was passed in, add the unit (except for certain CSS properties) - if ( type === "number" ) { - value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? 
"" : "px" ); - } - - // background-* props affect original clone's values - if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { - style[ name ] = "inherit"; - } - - // If a hook was provided, use that value, otherwise just set the specified value - if ( !hooks || !( "set" in hooks ) || - ( value = hooks.set( elem, value, extra ) ) !== undefined ) { - - if ( isCustomProp ) { - style.setProperty( name, value ); - } else { - style[ name ] = value; - } - } - - } else { - - // If a hook was provided get the non-computed value from there - if ( hooks && "get" in hooks && - ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { - - return ret; - } - - // Otherwise just get the value from the style object - return style[ name ]; - } - }, - - css: function( elem, name, extra, styles ) { - var val, num, hooks, - origName = jQuery.camelCase( name ), - isCustomProp = rcustomProp.test( name ); - - // Make sure that we're working with the right name. We don't - // want to modify the value if it is a CSS custom property - // since they are user-defined. - if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Try prefixed name followed by the unprefixed name - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // If a hook was provided get the computed value from there - if ( hooks && "get" in hooks ) { - val = hooks.get( elem, true, extra ); - } - - // Otherwise, if a way to get the computed value exists, use that - if ( val === undefined ) { - val = curCSS( elem, name, styles ); - } - - // Convert "normal" to computed value - if ( val === "normal" && name in cssNormalTransform ) { - val = cssNormalTransform[ name ]; - } - - // Make numeric if forced or a qualifier was provided and val looks numeric - if ( extra === "" || extra ) { - num = parseFloat( val ); - return extra === true || isFinite( num ) ? 
num || 0 : val; - } - - return val; - } -} ); - -jQuery.each( [ "height", "width" ], function( i, name ) { - jQuery.cssHooks[ name ] = { - get: function( elem, computed, extra ) { - if ( computed ) { - - // Certain elements can have dimension info if we invisibly show them - // but it must have a current display style that would benefit - return rdisplayswap.test( jQuery.css( elem, "display" ) ) && - - // Support: Safari 8+ - // Table columns in Safari have non-zero offsetWidth & zero - // getBoundingClientRect().width unless display is changed. - // Support: IE <=11 only - // Running getBoundingClientRect on a disconnected node - // in IE throws an error. - ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? - swap( elem, cssShow, function() { - return getWidthOrHeight( elem, name, extra ); - } ) : - getWidthOrHeight( elem, name, extra ); - } - }, - - set: function( elem, value, extra ) { - var matches, - styles = extra && getStyles( elem ), - subtract = extra && augmentWidthOrHeight( - elem, - name, - extra, - jQuery.css( elem, "boxSizing", false, styles ) === "border-box", - styles - ); - - // Convert to pixels if value adjustment is needed - if ( subtract && ( matches = rcssNum.exec( value ) ) && - ( matches[ 3 ] || "px" ) !== "px" ) { - - elem.style[ name ] = value; - value = jQuery.css( elem, name ); - } - - return setPositiveNumber( elem, value, subtract ); - } - }; -} ); - -jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, - function( elem, computed ) { - if ( computed ) { - return ( parseFloat( curCSS( elem, "marginLeft" ) ) || - elem.getBoundingClientRect().left - - swap( elem, { marginLeft: 0 }, function() { - return elem.getBoundingClientRect().left; - } ) - ) + "px"; - } - } -); - -// These hooks are used by animate to expand properties -jQuery.each( { - margin: "", - padding: "", - border: "Width" -}, function( prefix, suffix ) { - jQuery.cssHooks[ prefix + suffix ] = { - expand: function( value ) { - var i 
= 0, - expanded = {}, - - // Assumes a single number if not a string - parts = typeof value === "string" ? value.split( " " ) : [ value ]; - - for ( ; i < 4; i++ ) { - expanded[ prefix + cssExpand[ i ] + suffix ] = - parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; - } - - return expanded; - } - }; - - if ( !rmargin.test( prefix ) ) { - jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; - } -} ); - -jQuery.fn.extend( { - css: function( name, value ) { - return access( this, function( elem, name, value ) { - var styles, len, - map = {}, - i = 0; - - if ( Array.isArray( name ) ) { - styles = getStyles( elem ); - len = name.length; - - for ( ; i < len; i++ ) { - map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); - } - - return map; - } - - return value !== undefined ? - jQuery.style( elem, name, value ) : - jQuery.css( elem, name ); - }, name, value, arguments.length > 1 ); - } -} ); - - -function Tween( elem, options, prop, end, easing ) { - return new Tween.prototype.init( elem, options, prop, end, easing ); -} -jQuery.Tween = Tween; - -Tween.prototype = { - constructor: Tween, - init: function( elem, options, prop, end, easing, unit ) { - this.elem = elem; - this.prop = prop; - this.easing = easing || jQuery.easing._default; - this.options = options; - this.start = this.now = this.cur(); - this.end = end; - this.unit = unit || ( jQuery.cssNumber[ prop ] ? "" : "px" ); - }, - cur: function() { - var hooks = Tween.propHooks[ this.prop ]; - - return hooks && hooks.get ? 
- hooks.get( this ) : - Tween.propHooks._default.get( this ); - }, - run: function( percent ) { - var eased, - hooks = Tween.propHooks[ this.prop ]; - - if ( this.options.duration ) { - this.pos = eased = jQuery.easing[ this.easing ]( - percent, this.options.duration * percent, 0, 1, this.options.duration - ); - } else { - this.pos = eased = percent; - } - this.now = ( this.end - this.start ) * eased + this.start; - - if ( this.options.step ) { - this.options.step.call( this.elem, this.now, this ); - } - - if ( hooks && hooks.set ) { - hooks.set( this ); - } else { - Tween.propHooks._default.set( this ); - } - return this; - } -}; - -Tween.prototype.init.prototype = Tween.prototype; - -Tween.propHooks = { - _default: { - get: function( tween ) { - var result; - - // Use a property on the element directly when it is not a DOM element, - // or when there is no matching style property that exists. - if ( tween.elem.nodeType !== 1 || - tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { - return tween.elem[ tween.prop ]; - } - - // Passing an empty string as a 3rd parameter to .css will automatically - // attempt a parseFloat and fallback to a string if the parse fails. - // Simple values such as "10px" are parsed to Float; - // complex values such as "rotate(1rad)" are returned as-is. - result = jQuery.css( tween.elem, tween.prop, "" ); - - // Empty strings, null, undefined and "auto" are converted to 0. - return !result || result === "auto" ? 0 : result; - }, - set: function( tween ) { - - // Use step hook for back compat. - // Use cssHook if its there. - // Use .style if available and use plain properties where available. 
- if ( jQuery.fx.step[ tween.prop ] ) { - jQuery.fx.step[ tween.prop ]( tween ); - } else if ( tween.elem.nodeType === 1 && - ( tween.elem.style[ jQuery.cssProps[ tween.prop ] ] != null || - jQuery.cssHooks[ tween.prop ] ) ) { - jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); - } else { - tween.elem[ tween.prop ] = tween.now; - } - } - } -}; - -// Support: IE <=9 only -// Panic based approach to setting things on disconnected nodes -Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { - set: function( tween ) { - if ( tween.elem.nodeType && tween.elem.parentNode ) { - tween.elem[ tween.prop ] = tween.now; - } - } -}; - -jQuery.easing = { - linear: function( p ) { - return p; - }, - swing: function( p ) { - return 0.5 - Math.cos( p * Math.PI ) / 2; - }, - _default: "swing" -}; - -jQuery.fx = Tween.prototype.init; - -// Back compat <1.8 extension point -jQuery.fx.step = {}; - - - - -var - fxNow, inProgress, - rfxtypes = /^(?:toggle|show|hide)$/, - rrun = /queueHooks$/; - -function schedule() { - if ( inProgress ) { - if ( document.hidden === false && window.requestAnimationFrame ) { - window.requestAnimationFrame( schedule ); - } else { - window.setTimeout( schedule, jQuery.fx.interval ); - } - - jQuery.fx.tick(); - } -} - -// Animations created synchronously will run synchronously -function createFxNow() { - window.setTimeout( function() { - fxNow = undefined; - } ); - return ( fxNow = jQuery.now() ); -} - -// Generate parameters to create a standard animation -function genFx( type, includeWidth ) { - var which, - i = 0, - attrs = { height: type }; - - // If we include width, step value is 1 to do all cssExpand values, - // otherwise step value is 2 to skip over Left and Right - includeWidth = includeWidth ? 
1 : 0; - for ( ; i < 4; i += 2 - includeWidth ) { - which = cssExpand[ i ]; - attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; - } - - if ( includeWidth ) { - attrs.opacity = attrs.width = type; - } - - return attrs; -} - -function createTween( value, prop, animation ) { - var tween, - collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), - index = 0, - length = collection.length; - for ( ; index < length; index++ ) { - if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { - - // We're done with this property - return tween; - } - } -} - -function defaultPrefilter( elem, props, opts ) { - var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, - isBox = "width" in props || "height" in props, - anim = this, - orig = {}, - style = elem.style, - hidden = elem.nodeType && isHiddenWithinTree( elem ), - dataShow = dataPriv.get( elem, "fxshow" ); - - // Queue-skipping animations hijack the fx hooks - if ( !opts.queue ) { - hooks = jQuery._queueHooks( elem, "fx" ); - if ( hooks.unqueued == null ) { - hooks.unqueued = 0; - oldfire = hooks.empty.fire; - hooks.empty.fire = function() { - if ( !hooks.unqueued ) { - oldfire(); - } - }; - } - hooks.unqueued++; - - anim.always( function() { - - // Ensure the complete handler is called before this completes - anim.always( function() { - hooks.unqueued--; - if ( !jQuery.queue( elem, "fx" ).length ) { - hooks.empty.fire(); - } - } ); - } ); - } - - // Detect show/hide animations - for ( prop in props ) { - value = props[ prop ]; - if ( rfxtypes.test( value ) ) { - delete props[ prop ]; - toggle = toggle || value === "toggle"; - if ( value === ( hidden ? 
"hide" : "show" ) ) { - - // Pretend to be hidden if this is a "show" and - // there is still data from a stopped show/hide - if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { - hidden = true; - - // Ignore all other no-op show/hide data - } else { - continue; - } - } - orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); - } - } - - // Bail out if this is a no-op like .hide().hide() - propTween = !jQuery.isEmptyObject( props ); - if ( !propTween && jQuery.isEmptyObject( orig ) ) { - return; - } - - // Restrict "overflow" and "display" styles during box animations - if ( isBox && elem.nodeType === 1 ) { - - // Support: IE <=9 - 11, Edge 12 - 13 - // Record all 3 overflow attributes because IE does not infer the shorthand - // from identically-valued overflowX and overflowY - opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; - - // Identify a display type, preferring old show/hide data over the CSS cascade - restoreDisplay = dataShow && dataShow.display; - if ( restoreDisplay == null ) { - restoreDisplay = dataPriv.get( elem, "display" ); - } - display = jQuery.css( elem, "display" ); - if ( display === "none" ) { - if ( restoreDisplay ) { - display = restoreDisplay; - } else { - - // Get nonempty value(s) by temporarily forcing visibility - showHide( [ elem ], true ); - restoreDisplay = elem.style.display || restoreDisplay; - display = jQuery.css( elem, "display" ); - showHide( [ elem ] ); - } - } - - // Animate inline elements as inline-block - if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { - if ( jQuery.css( elem, "float" ) === "none" ) { - - // Restore the original display value at the end of pure show/hide animations - if ( !propTween ) { - anim.done( function() { - style.display = restoreDisplay; - } ); - if ( restoreDisplay == null ) { - display = style.display; - restoreDisplay = display === "none" ? 
"" : display; - } - } - style.display = "inline-block"; - } - } - } - - if ( opts.overflow ) { - style.overflow = "hidden"; - anim.always( function() { - style.overflow = opts.overflow[ 0 ]; - style.overflowX = opts.overflow[ 1 ]; - style.overflowY = opts.overflow[ 2 ]; - } ); - } - - // Implement show/hide animations - propTween = false; - for ( prop in orig ) { - - // General show/hide setup for this element animation - if ( !propTween ) { - if ( dataShow ) { - if ( "hidden" in dataShow ) { - hidden = dataShow.hidden; - } - } else { - dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); - } - - // Store hidden/visible for toggle so `.stop().toggle()` "reverses" - if ( toggle ) { - dataShow.hidden = !hidden; - } - - // Show elements before animating them - if ( hidden ) { - showHide( [ elem ], true ); - } - - /* eslint-disable no-loop-func */ - - anim.done( function() { - - /* eslint-enable no-loop-func */ - - // The final step of a "hide" animation is actually hiding the element - if ( !hidden ) { - showHide( [ elem ] ); - } - dataPriv.remove( elem, "fxshow" ); - for ( prop in orig ) { - jQuery.style( elem, prop, orig[ prop ] ); - } - } ); - } - - // Per-property setup - propTween = createTween( hidden ? 
dataShow[ prop ] : 0, prop, anim ); - if ( !( prop in dataShow ) ) { - dataShow[ prop ] = propTween.start; - if ( hidden ) { - propTween.end = propTween.start; - propTween.start = 0; - } - } - } -} - -function propFilter( props, specialEasing ) { - var index, name, easing, value, hooks; - - // camelCase, specialEasing and expand cssHook pass - for ( index in props ) { - name = jQuery.camelCase( index ); - easing = specialEasing[ name ]; - value = props[ index ]; - if ( Array.isArray( value ) ) { - easing = value[ 1 ]; - value = props[ index ] = value[ 0 ]; - } - - if ( index !== name ) { - props[ name ] = value; - delete props[ index ]; - } - - hooks = jQuery.cssHooks[ name ]; - if ( hooks && "expand" in hooks ) { - value = hooks.expand( value ); - delete props[ name ]; - - // Not quite $.extend, this won't overwrite existing keys. - // Reusing 'index' because we have the correct "name" - for ( index in value ) { - if ( !( index in props ) ) { - props[ index ] = value[ index ]; - specialEasing[ index ] = easing; - } - } - } else { - specialEasing[ name ] = easing; - } - } -} - -function Animation( elem, properties, options ) { - var result, - stopped, - index = 0, - length = Animation.prefilters.length, - deferred = jQuery.Deferred().always( function() { - - // Don't match elem in the :animated selector - delete tick.elem; - } ), - tick = function() { - if ( stopped ) { - return false; - } - var currentTime = fxNow || createFxNow(), - remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), - - // Support: Android 2.3 only - // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) - temp = remaining / animation.duration || 0, - percent = 1 - temp, - index = 0, - length = animation.tweens.length; - - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( percent ); - } - - deferred.notifyWith( elem, [ animation, percent, remaining ] ); - - // If there's more to do, yield - if ( percent < 1 && length ) { - return 
remaining; - } - - // If this was an empty animation, synthesize a final progress notification - if ( !length ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - } - - // Resolve the animation and report its conclusion - deferred.resolveWith( elem, [ animation ] ); - return false; - }, - animation = deferred.promise( { - elem: elem, - props: jQuery.extend( {}, properties ), - opts: jQuery.extend( true, { - specialEasing: {}, - easing: jQuery.easing._default - }, options ), - originalProperties: properties, - originalOptions: options, - startTime: fxNow || createFxNow(), - duration: options.duration, - tweens: [], - createTween: function( prop, end ) { - var tween = jQuery.Tween( elem, animation.opts, prop, end, - animation.opts.specialEasing[ prop ] || animation.opts.easing ); - animation.tweens.push( tween ); - return tween; - }, - stop: function( gotoEnd ) { - var index = 0, - - // If we are going to the end, we want to run all the tweens - // otherwise we skip this part - length = gotoEnd ? 
animation.tweens.length : 0; - if ( stopped ) { - return this; - } - stopped = true; - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( 1 ); - } - - // Resolve when we played the last frame; otherwise, reject - if ( gotoEnd ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - deferred.resolveWith( elem, [ animation, gotoEnd ] ); - } else { - deferred.rejectWith( elem, [ animation, gotoEnd ] ); - } - return this; - } - } ), - props = animation.props; - - propFilter( props, animation.opts.specialEasing ); - - for ( ; index < length; index++ ) { - result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); - if ( result ) { - if ( jQuery.isFunction( result.stop ) ) { - jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = - jQuery.proxy( result.stop, result ); - } - return result; - } - } - - jQuery.map( props, createTween, animation ); - - if ( jQuery.isFunction( animation.opts.start ) ) { - animation.opts.start.call( elem, animation ); - } - - // Attach callbacks from options - animation - .progress( animation.opts.progress ) - .done( animation.opts.done, animation.opts.complete ) - .fail( animation.opts.fail ) - .always( animation.opts.always ); - - jQuery.fx.timer( - jQuery.extend( tick, { - elem: elem, - anim: animation, - queue: animation.opts.queue - } ) - ); - - return animation; -} - -jQuery.Animation = jQuery.extend( Animation, { - - tweeners: { - "*": [ function( prop, value ) { - var tween = this.createTween( prop, value ); - adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); - return tween; - } ] - }, - - tweener: function( props, callback ) { - if ( jQuery.isFunction( props ) ) { - callback = props; - props = [ "*" ]; - } else { - props = props.match( rnothtmlwhite ); - } - - var prop, - index = 0, - length = props.length; - - for ( ; index < length; index++ ) { - prop = props[ index ]; - Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; - Animation.tweeners[ prop 
].unshift( callback ); - } - }, - - prefilters: [ defaultPrefilter ], - - prefilter: function( callback, prepend ) { - if ( prepend ) { - Animation.prefilters.unshift( callback ); - } else { - Animation.prefilters.push( callback ); - } - } -} ); - -jQuery.speed = function( speed, easing, fn ) { - var opt = speed && typeof speed === "object" ? jQuery.extend( {}, speed ) : { - complete: fn || !fn && easing || - jQuery.isFunction( speed ) && speed, - duration: speed, - easing: fn && easing || easing && !jQuery.isFunction( easing ) && easing - }; - - // Go to the end state if fx are off - if ( jQuery.fx.off ) { - opt.duration = 0; - - } else { - if ( typeof opt.duration !== "number" ) { - if ( opt.duration in jQuery.fx.speeds ) { - opt.duration = jQuery.fx.speeds[ opt.duration ]; - - } else { - opt.duration = jQuery.fx.speeds._default; - } - } - } - - // Normalize opt.queue - true/undefined/null -> "fx" - if ( opt.queue == null || opt.queue === true ) { - opt.queue = "fx"; - } - - // Queueing - opt.old = opt.complete; - - opt.complete = function() { - if ( jQuery.isFunction( opt.old ) ) { - opt.old.call( this ); - } - - if ( opt.queue ) { - jQuery.dequeue( this, opt.queue ); - } - }; - - return opt; -}; - -jQuery.fn.extend( { - fadeTo: function( speed, to, easing, callback ) { - - // Show any hidden elements after setting opacity to 0 - return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() - - // Animate to the value specified - .end().animate( { opacity: to }, speed, easing, callback ); - }, - animate: function( prop, speed, easing, callback ) { - var empty = jQuery.isEmptyObject( prop ), - optall = jQuery.speed( speed, easing, callback ), - doAnimation = function() { - - // Operate on a copy of prop so per-property easing won't be lost - var anim = Animation( this, jQuery.extend( {}, prop ), optall ); - - // Empty animations, or finishing resolves immediately - if ( empty || dataPriv.get( this, "finish" ) ) { - anim.stop( true ); - } - }; - 
doAnimation.finish = doAnimation; - - return empty || optall.queue === false ? - this.each( doAnimation ) : - this.queue( optall.queue, doAnimation ); - }, - stop: function( type, clearQueue, gotoEnd ) { - var stopQueue = function( hooks ) { - var stop = hooks.stop; - delete hooks.stop; - stop( gotoEnd ); - }; - - if ( typeof type !== "string" ) { - gotoEnd = clearQueue; - clearQueue = type; - type = undefined; - } - if ( clearQueue && type !== false ) { - this.queue( type || "fx", [] ); - } - - return this.each( function() { - var dequeue = true, - index = type != null && type + "queueHooks", - timers = jQuery.timers, - data = dataPriv.get( this ); - - if ( index ) { - if ( data[ index ] && data[ index ].stop ) { - stopQueue( data[ index ] ); - } - } else { - for ( index in data ) { - if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { - stopQueue( data[ index ] ); - } - } - } - - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && - ( type == null || timers[ index ].queue === type ) ) { - - timers[ index ].anim.stop( gotoEnd ); - dequeue = false; - timers.splice( index, 1 ); - } - } - - // Start the next in the queue if the last step wasn't forced. - // Timers currently will call their complete callbacks, which - // will dequeue but only if they were gotoEnd. - if ( dequeue || !gotoEnd ) { - jQuery.dequeue( this, type ); - } - } ); - }, - finish: function( type ) { - if ( type !== false ) { - type = type || "fx"; - } - return this.each( function() { - var index, - data = dataPriv.get( this ), - queue = data[ type + "queue" ], - hooks = data[ type + "queueHooks" ], - timers = jQuery.timers, - length = queue ? 
queue.length : 0; - - // Enable finishing flag on private data - data.finish = true; - - // Empty the queue first - jQuery.queue( this, type, [] ); - - if ( hooks && hooks.stop ) { - hooks.stop.call( this, true ); - } - - // Look for any active animations, and finish them - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && timers[ index ].queue === type ) { - timers[ index ].anim.stop( true ); - timers.splice( index, 1 ); - } - } - - // Look for any animations in the old queue and finish them - for ( index = 0; index < length; index++ ) { - if ( queue[ index ] && queue[ index ].finish ) { - queue[ index ].finish.call( this ); - } - } - - // Turn off finishing flag - delete data.finish; - } ); - } -} ); - -jQuery.each( [ "toggle", "show", "hide" ], function( i, name ) { - var cssFn = jQuery.fn[ name ]; - jQuery.fn[ name ] = function( speed, easing, callback ) { - return speed == null || typeof speed === "boolean" ? - cssFn.apply( this, arguments ) : - this.animate( genFx( name, true ), speed, easing, callback ); - }; -} ); - -// Generate shortcuts for custom animations -jQuery.each( { - slideDown: genFx( "show" ), - slideUp: genFx( "hide" ), - slideToggle: genFx( "toggle" ), - fadeIn: { opacity: "show" }, - fadeOut: { opacity: "hide" }, - fadeToggle: { opacity: "toggle" } -}, function( name, props ) { - jQuery.fn[ name ] = function( speed, easing, callback ) { - return this.animate( props, speed, easing, callback ); - }; -} ); - -jQuery.timers = []; -jQuery.fx.tick = function() { - var timer, - i = 0, - timers = jQuery.timers; - - fxNow = jQuery.now(); - - for ( ; i < timers.length; i++ ) { - timer = timers[ i ]; - - // Run the timer and safely remove it when done (allowing for external removal) - if ( !timer() && timers[ i ] === timer ) { - timers.splice( i--, 1 ); - } - } - - if ( !timers.length ) { - jQuery.fx.stop(); - } - fxNow = undefined; -}; - -jQuery.fx.timer = function( timer ) { - jQuery.timers.push( timer ); - 
jQuery.fx.start(); -}; - -jQuery.fx.interval = 13; -jQuery.fx.start = function() { - if ( inProgress ) { - return; - } - - inProgress = true; - schedule(); -}; - -jQuery.fx.stop = function() { - inProgress = null; -}; - -jQuery.fx.speeds = { - slow: 600, - fast: 200, - - // Default speed - _default: 400 -}; - - -// Based off of the plugin by Clint Helfers, with permission. -// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ -jQuery.fn.delay = function( time, type ) { - time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time; - type = type || "fx"; - - return this.queue( type, function( next, hooks ) { - var timeout = window.setTimeout( next, time ); - hooks.stop = function() { - window.clearTimeout( timeout ); - }; - } ); -}; - - -( function() { - var input = document.createElement( "input" ), - select = document.createElement( "select" ), - opt = select.appendChild( document.createElement( "option" ) ); - - input.type = "checkbox"; - - // Support: Android <=4.3 only - // Default value for a checkbox should be "on" - support.checkOn = input.value !== ""; - - // Support: IE <=11 only - // Must access selectedIndex to make default options select - support.optSelected = opt.selected; - - // Support: IE <=11 only - // An input loses its value after becoming a radio - input = document.createElement( "input" ); - input.value = "t"; - input.type = "radio"; - support.radioValue = input.value === "t"; -} )(); - - -var boolHook, - attrHandle = jQuery.expr.attrHandle; - -jQuery.fn.extend( { - attr: function( name, value ) { - return access( this, jQuery.attr, name, value, arguments.length > 1 ); - }, - - removeAttr: function( name ) { - return this.each( function() { - jQuery.removeAttr( this, name ); - } ); - } -} ); - -jQuery.extend( { - attr: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set attributes on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || 
nType === 2 ) { - return; - } - - // Fallback to prop when attributes are not supported - if ( typeof elem.getAttribute === "undefined" ) { - return jQuery.prop( elem, name, value ); - } - - // Attribute hooks are determined by the lowercase version - // Grab necessary hook if one is defined - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - hooks = jQuery.attrHooks[ name.toLowerCase() ] || - ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); - } - - if ( value !== undefined ) { - if ( value === null ) { - jQuery.removeAttr( elem, name ); - return; - } - - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - elem.setAttribute( name, value + "" ); - return value; - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - ret = jQuery.find.attr( elem, name ); - - // Non-existent attributes return null, we normalize to undefined - return ret == null ? undefined : ret; - }, - - attrHooks: { - type: { - set: function( elem, value ) { - if ( !support.radioValue && value === "radio" && - nodeName( elem, "input" ) ) { - var val = elem.value; - elem.setAttribute( "type", value ); - if ( val ) { - elem.value = val; - } - return value; - } - } - } - }, - - removeAttr: function( elem, value ) { - var name, - i = 0, - - // Attribute names can contain non-HTML whitespace characters - // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 - attrNames = value && value.match( rnothtmlwhite ); - - if ( attrNames && elem.nodeType === 1 ) { - while ( ( name = attrNames[ i++ ] ) ) { - elem.removeAttribute( name ); - } - } - } -} ); - -// Hooks for boolean attributes -boolHook = { - set: function( elem, value, name ) { - if ( value === false ) { - - // Remove boolean attributes when set to false - jQuery.removeAttr( elem, name ); - } else { - elem.setAttribute( name, name ); - } - return name; - } -}; - -jQuery.each( 
jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) { - var getter = attrHandle[ name ] || jQuery.find.attr; - - attrHandle[ name ] = function( elem, name, isXML ) { - var ret, handle, - lowercaseName = name.toLowerCase(); - - if ( !isXML ) { - - // Avoid an infinite loop by temporarily removing this function from the getter - handle = attrHandle[ lowercaseName ]; - attrHandle[ lowercaseName ] = ret; - ret = getter( elem, name, isXML ) != null ? - lowercaseName : - null; - attrHandle[ lowercaseName ] = handle; - } - return ret; - }; -} ); - - - - -var rfocusable = /^(?:input|select|textarea|button)$/i, - rclickable = /^(?:a|area)$/i; - -jQuery.fn.extend( { - prop: function( name, value ) { - return access( this, jQuery.prop, name, value, arguments.length > 1 ); - }, - - removeProp: function( name ) { - return this.each( function() { - delete this[ jQuery.propFix[ name ] || name ]; - } ); - } -} ); - -jQuery.extend( { - prop: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set properties on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - - // Fix name and attach hooks - name = jQuery.propFix[ name ] || name; - hooks = jQuery.propHooks[ name ]; - } - - if ( value !== undefined ) { - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - return ( elem[ name ] = value ); - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - return elem[ name ]; - }, - - propHooks: { - tabIndex: { - get: function( elem ) { - - // Support: IE <=9 - 11 only - // elem.tabIndex doesn't always return the - // correct value when it hasn't been explicitly set - // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ - // Use 
proper attribute retrieval(#12072) - var tabindex = jQuery.find.attr( elem, "tabindex" ); - - if ( tabindex ) { - return parseInt( tabindex, 10 ); - } - - if ( - rfocusable.test( elem.nodeName ) || - rclickable.test( elem.nodeName ) && - elem.href - ) { - return 0; - } - - return -1; - } - } - }, - - propFix: { - "for": "htmlFor", - "class": "className" - } -} ); - -// Support: IE <=11 only -// Accessing the selectedIndex property -// forces the browser to respect setting selected -// on the option -// The getter ensures a default option is selected -// when in an optgroup -// eslint rule "no-unused-expressions" is disabled for this code -// since it considers such accessions noop -if ( !support.optSelected ) { - jQuery.propHooks.selected = { - get: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent && parent.parentNode ) { - parent.parentNode.selectedIndex; - } - return null; - }, - set: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent ) { - parent.selectedIndex; - - if ( parent.parentNode ) { - parent.parentNode.selectedIndex; - } - } - } - }; -} - -jQuery.each( [ - "tabIndex", - "readOnly", - "maxLength", - "cellSpacing", - "cellPadding", - "rowSpan", - "colSpan", - "useMap", - "frameBorder", - "contentEditable" -], function() { - jQuery.propFix[ this.toLowerCase() ] = this; -} ); - - - - - // Strip and collapse whitespace according to HTML spec - // https://html.spec.whatwg.org/multipage/infrastructure.html#strip-and-collapse-whitespace - function stripAndCollapse( value ) { - var tokens = value.match( rnothtmlwhite ) || []; - return tokens.join( " " ); - } - - -function getClass( elem ) { - return elem.getAttribute && elem.getAttribute( "class" ) || ""; -} - -jQuery.fn.extend( { - addClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( jQuery.isFunction( value ) ) { - return this.each( 
function( j ) { - jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - if ( typeof value === "string" && value ) { - classes = value.match( rnothtmlwhite ) || []; - - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - if ( cur.indexOf( " " + clazz + " " ) < 0 ) { - cur += clazz + " "; - } - } - - // Only assign if different to avoid unneeded rendering. - finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - removeClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( jQuery.isFunction( value ) ) { - return this.each( function( j ) { - jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - if ( !arguments.length ) { - return this.attr( "class", "" ); - } - - if ( typeof value === "string" && value ) { - classes = value.match( rnothtmlwhite ) || []; - - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - - // This expression is here for better compressibility (see addClass) - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - - // Remove *all* instances - while ( cur.indexOf( " " + clazz + " " ) > -1 ) { - cur = cur.replace( " " + clazz + " ", " " ); - } - } - - // Only assign if different to avoid unneeded rendering. - finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - toggleClass: function( value, stateVal ) { - var type = typeof value; - - if ( typeof stateVal === "boolean" && type === "string" ) { - return stateVal ? 
this.addClass( value ) : this.removeClass( value ); - } - - if ( jQuery.isFunction( value ) ) { - return this.each( function( i ) { - jQuery( this ).toggleClass( - value.call( this, i, getClass( this ), stateVal ), - stateVal - ); - } ); - } - - return this.each( function() { - var className, i, self, classNames; - - if ( type === "string" ) { - - // Toggle individual class names - i = 0; - self = jQuery( this ); - classNames = value.match( rnothtmlwhite ) || []; - - while ( ( className = classNames[ i++ ] ) ) { - - // Check each className given, space separated list - if ( self.hasClass( className ) ) { - self.removeClass( className ); - } else { - self.addClass( className ); - } - } - - // Toggle whole class name - } else if ( value === undefined || type === "boolean" ) { - className = getClass( this ); - if ( className ) { - - // Store className if set - dataPriv.set( this, "__className__", className ); - } - - // If the element has a class name or if we're passed `false`, - // then remove the whole classname (if there was one, the above saved it). - // Otherwise bring back whatever was previously saved (if anything), - // falling back to the empty string if nothing was stored. - if ( this.setAttribute ) { - this.setAttribute( "class", - className || value === false ? 
- "" : - dataPriv.get( this, "__className__" ) || "" - ); - } - } - } ); - }, - - hasClass: function( selector ) { - var className, elem, - i = 0; - - className = " " + selector + " "; - while ( ( elem = this[ i++ ] ) ) { - if ( elem.nodeType === 1 && - ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { - return true; - } - } - - return false; - } -} ); - - - - -var rreturn = /\r/g; - -jQuery.fn.extend( { - val: function( value ) { - var hooks, ret, isFunction, - elem = this[ 0 ]; - - if ( !arguments.length ) { - if ( elem ) { - hooks = jQuery.valHooks[ elem.type ] || - jQuery.valHooks[ elem.nodeName.toLowerCase() ]; - - if ( hooks && - "get" in hooks && - ( ret = hooks.get( elem, "value" ) ) !== undefined - ) { - return ret; - } - - ret = elem.value; - - // Handle most common string cases - if ( typeof ret === "string" ) { - return ret.replace( rreturn, "" ); - } - - // Handle cases where value is null/undef or number - return ret == null ? "" : ret; - } - - return; - } - - isFunction = jQuery.isFunction( value ); - - return this.each( function( i ) { - var val; - - if ( this.nodeType !== 1 ) { - return; - } - - if ( isFunction ) { - val = value.call( this, i, jQuery( this ).val() ); - } else { - val = value; - } - - // Treat null/undefined as ""; convert numbers to string - if ( val == null ) { - val = ""; - - } else if ( typeof val === "number" ) { - val += ""; - - } else if ( Array.isArray( val ) ) { - val = jQuery.map( val, function( value ) { - return value == null ? "" : value + ""; - } ); - } - - hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; - - // If set returns undefined, fall back to normal setting - if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { - this.value = val; - } - } ); - } -} ); - -jQuery.extend( { - valHooks: { - option: { - get: function( elem ) { - - var val = jQuery.find.attr( elem, "value" ); - return val != null ? 
- val : - - // Support: IE <=10 - 11 only - // option.text throws exceptions (#14686, #14858) - // Strip and collapse whitespace - // https://html.spec.whatwg.org/#strip-and-collapse-whitespace - stripAndCollapse( jQuery.text( elem ) ); - } - }, - select: { - get: function( elem ) { - var value, option, i, - options = elem.options, - index = elem.selectedIndex, - one = elem.type === "select-one", - values = one ? null : [], - max = one ? index + 1 : options.length; - - if ( index < 0 ) { - i = max; - - } else { - i = one ? index : 0; - } - - // Loop through all the selected options - for ( ; i < max; i++ ) { - option = options[ i ]; - - // Support: IE <=9 only - // IE8-9 doesn't update selected after form reset (#2551) - if ( ( option.selected || i === index ) && - - // Don't return options that are disabled or in a disabled optgroup - !option.disabled && - ( !option.parentNode.disabled || - !nodeName( option.parentNode, "optgroup" ) ) ) { - - // Get the specific value for the option - value = jQuery( option ).val(); - - // We don't need an array for one selects - if ( one ) { - return value; - } - - // Multi-Selects return an array - values.push( value ); - } - } - - return values; - }, - - set: function( elem, value ) { - var optionSet, option, - options = elem.options, - values = jQuery.makeArray( value ), - i = options.length; - - while ( i-- ) { - option = options[ i ]; - - /* eslint-disable no-cond-assign */ - - if ( option.selected = - jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 - ) { - optionSet = true; - } - - /* eslint-enable no-cond-assign */ - } - - // Force browsers to behave consistently when non-matching value is set - if ( !optionSet ) { - elem.selectedIndex = -1; - } - return values; - } - } - } -} ); - -// Radios and checkboxes getter/setter -jQuery.each( [ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = { - set: function( elem, value ) { - if ( Array.isArray( value ) ) { - return ( elem.checked = 
jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); - } - } - }; - if ( !support.checkOn ) { - jQuery.valHooks[ this ].get = function( elem ) { - return elem.getAttribute( "value" ) === null ? "on" : elem.value; - }; - } -} ); - - - - -// Return jQuery for attributes-only inclusion - - -var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/; - -jQuery.extend( jQuery.event, { - - trigger: function( event, data, elem, onlyHandlers ) { - - var i, cur, tmp, bubbleType, ontype, handle, special, - eventPath = [ elem || document ], - type = hasOwn.call( event, "type" ) ? event.type : event, - namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; - - cur = tmp = elem = elem || document; - - // Don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - // focus/blur morphs to focusin/out; ensure we're not firing them right now - if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { - return; - } - - if ( type.indexOf( "." ) > -1 ) { - - // Namespaced trigger; create a regexp to match event type in handle() - namespaces = type.split( "." ); - type = namespaces.shift(); - namespaces.sort(); - } - ontype = type.indexOf( ":" ) < 0 && "on" + type; - - // Caller can pass in a jQuery.Event object, Object, or just an event type string - event = event[ jQuery.expando ] ? - event : - new jQuery.Event( type, typeof event === "object" && event ); - - // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) - event.isTrigger = onlyHandlers ? 2 : 3; - event.namespace = namespaces.join( "." ); - event.rnamespace = event.namespace ? - new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : - null; - - // Clean up the event in case it is being reused - event.result = undefined; - if ( !event.target ) { - event.target = elem; - } - - // Clone any incoming data and prepend the event, creating the handler arg list - data = data == null ? 
- [ event ] : - jQuery.makeArray( data, [ event ] ); - - // Allow special events to draw outside the lines - special = jQuery.event.special[ type ] || {}; - if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { - return; - } - - // Determine event propagation path in advance, per W3C events spec (#9951) - // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) - if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) { - - bubbleType = special.delegateType || type; - if ( !rfocusMorph.test( bubbleType + type ) ) { - cur = cur.parentNode; - } - for ( ; cur; cur = cur.parentNode ) { - eventPath.push( cur ); - tmp = cur; - } - - // Only add window if we got to document (e.g., not plain obj or detached DOM) - if ( tmp === ( elem.ownerDocument || document ) ) { - eventPath.push( tmp.defaultView || tmp.parentWindow || window ); - } - } - - // Fire handlers on the event path - i = 0; - while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { - - event.type = i > 1 ? - bubbleType : - special.bindType || type; - - // jQuery handler - handle = ( dataPriv.get( cur, "events" ) || {} )[ event.type ] && - dataPriv.get( cur, "handle" ); - if ( handle ) { - handle.apply( cur, data ); - } - - // Native handler - handle = ontype && cur[ ontype ]; - if ( handle && handle.apply && acceptData( cur ) ) { - event.result = handle.apply( cur, data ); - if ( event.result === false ) { - event.preventDefault(); - } - } - } - event.type = type; - - // If nobody prevented the default action, do it now - if ( !onlyHandlers && !event.isDefaultPrevented() ) { - - if ( ( !special._default || - special._default.apply( eventPath.pop(), data ) === false ) && - acceptData( elem ) ) { - - // Call a native DOM method on the target with the same name as the event. 
- // Don't do default actions on window, that's where global variables be (#6170) - if ( ontype && jQuery.isFunction( elem[ type ] ) && !jQuery.isWindow( elem ) ) { - - // Don't re-trigger an onFOO event when we call its FOO() method - tmp = elem[ ontype ]; - - if ( tmp ) { - elem[ ontype ] = null; - } - - // Prevent re-triggering of the same event, since we already bubbled it above - jQuery.event.triggered = type; - elem[ type ](); - jQuery.event.triggered = undefined; - - if ( tmp ) { - elem[ ontype ] = tmp; - } - } - } - } - - return event.result; - }, - - // Piggyback on a donor event to simulate a different one - // Used only for `focus(in | out)` events - simulate: function( type, elem, event ) { - var e = jQuery.extend( - new jQuery.Event(), - event, - { - type: type, - isSimulated: true - } - ); - - jQuery.event.trigger( e, null, elem ); - } - -} ); - -jQuery.fn.extend( { - - trigger: function( type, data ) { - return this.each( function() { - jQuery.event.trigger( type, data, this ); - } ); - }, - triggerHandler: function( type, data ) { - var elem = this[ 0 ]; - if ( elem ) { - return jQuery.event.trigger( type, data, elem, true ); - } - } -} ); - - -jQuery.each( ( "blur focus focusin focusout resize scroll click dblclick " + - "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " + - "change select submit keydown keypress keyup contextmenu" ).split( " " ), - function( i, name ) { - - // Handle event binding - jQuery.fn[ name ] = function( data, fn ) { - return arguments.length > 0 ? 
- this.on( name, null, data, fn ) : - this.trigger( name ); - }; -} ); - -jQuery.fn.extend( { - hover: function( fnOver, fnOut ) { - return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver ); - } -} ); - - - - -support.focusin = "onfocusin" in window; - - -// Support: Firefox <=44 -// Firefox doesn't have focus(in | out) events -// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 -// -// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 -// focus(in | out) events fire after focus & blur events, -// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order -// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 -if ( !support.focusin ) { - jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { - - // Attach a single capturing handler on the document while someone wants focusin/focusout - var handler = function( event ) { - jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); - }; - - jQuery.event.special[ fix ] = { - setup: function() { - var doc = this.ownerDocument || this, - attaches = dataPriv.access( doc, fix ); - - if ( !attaches ) { - doc.addEventListener( orig, handler, true ); - } - dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); - }, - teardown: function() { - var doc = this.ownerDocument || this, - attaches = dataPriv.access( doc, fix ) - 1; - - if ( !attaches ) { - doc.removeEventListener( orig, handler, true ); - dataPriv.remove( doc, fix ); - - } else { - dataPriv.access( doc, fix, attaches ); - } - } - }; - } ); -} -var location = window.location; - -var nonce = jQuery.now(); - -var rquery = ( /\?/ ); - - - -// Cross-browser xml parsing -jQuery.parseXML = function( data ) { - var xml; - if ( !data || typeof data !== "string" ) { - return null; - } - - // Support: IE 9 - 11 only - // IE throws on parseFromString with invalid input. 
- try { - xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); - } catch ( e ) { - xml = undefined; - } - - if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) { - jQuery.error( "Invalid XML: " + data ); - } - return xml; -}; - - -var - rbracket = /\[\]$/, - rCRLF = /\r?\n/g, - rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, - rsubmittable = /^(?:input|select|textarea|keygen)/i; - -function buildParams( prefix, obj, traditional, add ) { - var name; - - if ( Array.isArray( obj ) ) { - - // Serialize array item. - jQuery.each( obj, function( i, v ) { - if ( traditional || rbracket.test( prefix ) ) { - - // Treat each array item as a scalar. - add( prefix, v ); - - } else { - - // Item is non-scalar (array or object), encode its numeric index. - buildParams( - prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", - v, - traditional, - add - ); - } - } ); - - } else if ( !traditional && jQuery.type( obj ) === "object" ) { - - // Serialize object item. - for ( name in obj ) { - buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); - } - - } else { - - // Serialize scalar item. - add( prefix, obj ); - } -} - -// Serialize an array of form elements or a set of -// key/values into a query string -jQuery.param = function( a, traditional ) { - var prefix, - s = [], - add = function( key, valueOrFunction ) { - - // If value is a function, invoke it and use its return value - var value = jQuery.isFunction( valueOrFunction ) ? - valueOrFunction() : - valueOrFunction; - - s[ s.length ] = encodeURIComponent( key ) + "=" + - encodeURIComponent( value == null ? "" : value ); - }; - - // If an array was passed in, assume that it is an array of form elements. 
- if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { - - // Serialize the form elements - jQuery.each( a, function() { - add( this.name, this.value ); - } ); - - } else { - - // If traditional, encode the "old" way (the way 1.3.2 or older - // did it), otherwise encode params recursively. - for ( prefix in a ) { - buildParams( prefix, a[ prefix ], traditional, add ); - } - } - - // Return the resulting serialization - return s.join( "&" ); -}; - -jQuery.fn.extend( { - serialize: function() { - return jQuery.param( this.serializeArray() ); - }, - serializeArray: function() { - return this.map( function() { - - // Can add propHook for "elements" to filter or add form elements - var elements = jQuery.prop( this, "elements" ); - return elements ? jQuery.makeArray( elements ) : this; - } ) - .filter( function() { - var type = this.type; - - // Use .is( ":disabled" ) so that fieldset[disabled] works - return this.name && !jQuery( this ).is( ":disabled" ) && - rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && - ( this.checked || !rcheckableType.test( type ) ); - } ) - .map( function( i, elem ) { - var val = jQuery( this ).val(); - - if ( val == null ) { - return null; - } - - if ( Array.isArray( val ) ) { - return jQuery.map( val, function( val ) { - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ); - } - - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ).get(); - } -} ); - - -var - r20 = /%20/g, - rhash = /#.*$/, - rantiCache = /([?&])_=[^&]*/, - rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, - - // #7653, #8125, #8152: local protocol detection - rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, - rnoContent = /^(?:GET|HEAD)$/, - rprotocol = /^\/\//, - - /* Prefilters - * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) - * 2) These are called: - * - BEFORE asking for a transport - * - AFTER param serialization (s.data is a 
string if s.processData is true) - * 3) key is the dataType - * 4) the catchall symbol "*" can be used - * 5) execution will start with transport dataType and THEN continue down to "*" if needed - */ - prefilters = {}, - - /* Transports bindings - * 1) key is the dataType - * 2) the catchall symbol "*" can be used - * 3) selection will start with transport dataType and THEN go to "*" if needed - */ - transports = {}, - - // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression - allTypes = "*/".concat( "*" ), - - // Anchor tag for parsing the document origin - originAnchor = document.createElement( "a" ); - originAnchor.href = location.href; - -// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport -function addToPrefiltersOrTransports( structure ) { - - // dataTypeExpression is optional and defaults to "*" - return function( dataTypeExpression, func ) { - - if ( typeof dataTypeExpression !== "string" ) { - func = dataTypeExpression; - dataTypeExpression = "*"; - } - - var dataType, - i = 0, - dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; - - if ( jQuery.isFunction( func ) ) { - - // For each dataType in the dataTypeExpression - while ( ( dataType = dataTypes[ i++ ] ) ) { - - // Prepend if requested - if ( dataType[ 0 ] === "+" ) { - dataType = dataType.slice( 1 ) || "*"; - ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); - - // Otherwise append - } else { - ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); - } - } - } - }; -} - -// Base inspection function for prefilters and transports -function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { - - var inspected = {}, - seekingTransport = ( structure === transports ); - - function inspect( dataType ) { - var selected; - inspected[ dataType ] = true; - jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { - var dataTypeOrTransport = 
prefilterOrFactory( options, originalOptions, jqXHR ); - if ( typeof dataTypeOrTransport === "string" && - !seekingTransport && !inspected[ dataTypeOrTransport ] ) { - - options.dataTypes.unshift( dataTypeOrTransport ); - inspect( dataTypeOrTransport ); - return false; - } else if ( seekingTransport ) { - return !( selected = dataTypeOrTransport ); - } - } ); - return selected; - } - - return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); -} - -// A special extend for ajax options -// that takes "flat" options (not to be deep extended) -// Fixes #9887 -function ajaxExtend( target, src ) { - var key, deep, - flatOptions = jQuery.ajaxSettings.flatOptions || {}; - - for ( key in src ) { - if ( src[ key ] !== undefined ) { - ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; - } - } - if ( deep ) { - jQuery.extend( true, target, deep ); - } - - return target; -} - -/* Handles responses to an ajax request: - * - finds the right dataType (mediates between content-type and expected dataType) - * - returns the corresponding response - */ -function ajaxHandleResponses( s, jqXHR, responses ) { - - var ct, type, finalDataType, firstDataType, - contents = s.contents, - dataTypes = s.dataTypes; - - // Remove auto dataType and get content-type in the process - while ( dataTypes[ 0 ] === "*" ) { - dataTypes.shift(); - if ( ct === undefined ) { - ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); - } - } - - // Check if we're dealing with a known content-type - if ( ct ) { - for ( type in contents ) { - if ( contents[ type ] && contents[ type ].test( ct ) ) { - dataTypes.unshift( type ); - break; - } - } - } - - // Check to see if we have a response for the expected dataType - if ( dataTypes[ 0 ] in responses ) { - finalDataType = dataTypes[ 0 ]; - } else { - - // Try convertible dataTypes - for ( type in responses ) { - if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { - finalDataType = 
type; - break; - } - if ( !firstDataType ) { - firstDataType = type; - } - } - - // Or just use first one - finalDataType = finalDataType || firstDataType; - } - - // If we found a dataType - // We add the dataType to the list if needed - // and return the corresponding response - if ( finalDataType ) { - if ( finalDataType !== dataTypes[ 0 ] ) { - dataTypes.unshift( finalDataType ); - } - return responses[ finalDataType ]; - } -} - -/* Chain conversions given the request and the original response - * Also sets the responseXXX fields on the jqXHR instance - */ -function ajaxConvert( s, response, jqXHR, isSuccess ) { - var conv2, current, conv, tmp, prev, - converters = {}, - - // Work with a copy of dataTypes in case we need to modify it for conversion - dataTypes = s.dataTypes.slice(); - - // Create converters map with lowercased keys - if ( dataTypes[ 1 ] ) { - for ( conv in s.converters ) { - converters[ conv.toLowerCase() ] = s.converters[ conv ]; - } - } - - current = dataTypes.shift(); - - // Convert to each sequential dataType - while ( current ) { - - if ( s.responseFields[ current ] ) { - jqXHR[ s.responseFields[ current ] ] = response; - } - - // Apply the dataFilter if provided - if ( !prev && isSuccess && s.dataFilter ) { - response = s.dataFilter( response, s.dataType ); - } - - prev = current; - current = dataTypes.shift(); - - if ( current ) { - - // There's only work to do if current dataType is non-auto - if ( current === "*" ) { - - current = prev; - - // Convert response if prev dataType is non-auto and differs from current - } else if ( prev !== "*" && prev !== current ) { - - // Seek a direct converter - conv = converters[ prev + " " + current ] || converters[ "* " + current ]; - - // If none found, seek a pair - if ( !conv ) { - for ( conv2 in converters ) { - - // If conv2 outputs current - tmp = conv2.split( " " ); - if ( tmp[ 1 ] === current ) { - - // If prev can be converted to accepted input - conv = converters[ prev + " " + tmp[ 0 ] ] 
|| - converters[ "* " + tmp[ 0 ] ]; - if ( conv ) { - - // Condense equivalence converters - if ( conv === true ) { - conv = converters[ conv2 ]; - - // Otherwise, insert the intermediate dataType - } else if ( converters[ conv2 ] !== true ) { - current = tmp[ 0 ]; - dataTypes.unshift( tmp[ 1 ] ); - } - break; - } - } - } - } - - // Apply converter (if not an equivalence) - if ( conv !== true ) { - - // Unless errors are allowed to bubble, catch and return them - if ( conv && s.throws ) { - response = conv( response ); - } else { - try { - response = conv( response ); - } catch ( e ) { - return { - state: "parsererror", - error: conv ? e : "No conversion from " + prev + " to " + current - }; - } - } - } - } - } - } - - return { state: "success", data: response }; -} - -jQuery.extend( { - - // Counter for holding the number of active queries - active: 0, - - // Last-Modified header cache for next request - lastModified: {}, - etag: {}, - - ajaxSettings: { - url: location.href, - type: "GET", - isLocal: rlocalProtocol.test( location.protocol ), - global: true, - processData: true, - async: true, - contentType: "application/x-www-form-urlencoded; charset=UTF-8", - - /* - timeout: 0, - data: null, - dataType: null, - username: null, - password: null, - cache: null, - throws: false, - traditional: false, - headers: {}, - */ - - accepts: { - "*": allTypes, - text: "text/plain", - html: "text/html", - xml: "application/xml, text/xml", - json: "application/json, text/javascript" - }, - - contents: { - xml: /\bxml\b/, - html: /\bhtml/, - json: /\bjson\b/ - }, - - responseFields: { - xml: "responseXML", - text: "responseText", - json: "responseJSON" - }, - - // Data converters - // Keys separate source (or catchall "*") and destination types with a single space - converters: { - - // Convert anything to text - "* text": String, - - // Text to html (true = no transformation) - "text html": true, - - // Evaluate text as a json expression - "text json": JSON.parse, - - // Parse 
text as xml - "text xml": jQuery.parseXML - }, - - // For options that shouldn't be deep extended: - // you can add your own custom options here if - // and when you create one that shouldn't be - // deep extended (see ajaxExtend) - flatOptions: { - url: true, - context: true - } - }, - - // Creates a full fledged settings object into target - // with both ajaxSettings and settings fields. - // If target is omitted, writes into ajaxSettings. - ajaxSetup: function( target, settings ) { - return settings ? - - // Building a settings object - ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : - - // Extending ajaxSettings - ajaxExtend( jQuery.ajaxSettings, target ); - }, - - ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), - ajaxTransport: addToPrefiltersOrTransports( transports ), - - // Main method - ajax: function( url, options ) { - - // If url is an object, simulate pre-1.5 signature - if ( typeof url === "object" ) { - options = url; - url = undefined; - } - - // Force options to be an object - options = options || {}; - - var transport, - - // URL without anti-cache param - cacheURL, - - // Response headers - responseHeadersString, - responseHeaders, - - // timeout handle - timeoutTimer, - - // Url cleanup var - urlAnchor, - - // Request state (becomes false upon send and true upon completion) - completed, - - // To know if global events are to be dispatched - fireGlobals, - - // Loop variable - i, - - // uncached part of the url - uncached, - - // Create the final options object - s = jQuery.ajaxSetup( {}, options ), - - // Callbacks context - callbackContext = s.context || s, - - // Context for global events is callbackContext if it is a DOM node or jQuery collection - globalEventContext = s.context && - ( callbackContext.nodeType || callbackContext.jquery ) ? 
- jQuery( callbackContext ) : - jQuery.event, - - // Deferreds - deferred = jQuery.Deferred(), - completeDeferred = jQuery.Callbacks( "once memory" ), - - // Status-dependent callbacks - statusCode = s.statusCode || {}, - - // Headers (they are sent all at once) - requestHeaders = {}, - requestHeadersNames = {}, - - // Default abort message - strAbort = "canceled", - - // Fake xhr - jqXHR = { - readyState: 0, - - // Builds headers hashtable if needed - getResponseHeader: function( key ) { - var match; - if ( completed ) { - if ( !responseHeaders ) { - responseHeaders = {}; - while ( ( match = rheaders.exec( responseHeadersString ) ) ) { - responseHeaders[ match[ 1 ].toLowerCase() ] = match[ 2 ]; - } - } - match = responseHeaders[ key.toLowerCase() ]; - } - return match == null ? null : match; - }, - - // Raw string - getAllResponseHeaders: function() { - return completed ? responseHeadersString : null; - }, - - // Caches the header - setRequestHeader: function( name, value ) { - if ( completed == null ) { - name = requestHeadersNames[ name.toLowerCase() ] = - requestHeadersNames[ name.toLowerCase() ] || name; - requestHeaders[ name ] = value; - } - return this; - }, - - // Overrides response content-type header - overrideMimeType: function( type ) { - if ( completed == null ) { - s.mimeType = type; - } - return this; - }, - - // Status-dependent callbacks - statusCode: function( map ) { - var code; - if ( map ) { - if ( completed ) { - - // Execute the appropriate callbacks - jqXHR.always( map[ jqXHR.status ] ); - } else { - - // Lazy-add the new callbacks in a way that preserves old ones - for ( code in map ) { - statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; - } - } - } - return this; - }, - - // Cancel the request - abort: function( statusText ) { - var finalText = statusText || strAbort; - if ( transport ) { - transport.abort( finalText ); - } - done( 0, finalText ); - return this; - } - }; - - // Attach deferreds - deferred.promise( jqXHR ); - - // 
Add protocol if not provided (prefilters might expect it) - // Handle falsy url in the settings object (#10093: consistency with old signature) - // We also use the url parameter if available - s.url = ( ( url || s.url || location.href ) + "" ) - .replace( rprotocol, location.protocol + "//" ); - - // Alias method option to type as per ticket #12004 - s.type = options.method || options.type || s.method || s.type; - - // Extract dataTypes list - s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; - - // A cross-domain request is in order when the origin doesn't match the current origin. - if ( s.crossDomain == null ) { - urlAnchor = document.createElement( "a" ); - - // Support: IE <=8 - 11, Edge 12 - 13 - // IE throws exception on accessing the href property if url is malformed, - // e.g. http://example.com:80x/ - try { - urlAnchor.href = s.url; - - // Support: IE <=8 - 11 only - // Anchor's host property isn't correctly set when s.url is relative - urlAnchor.href = urlAnchor.href; - s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== - urlAnchor.protocol + "//" + urlAnchor.host; - } catch ( e ) { - - // If there is an error parsing the URL, assume it is crossDomain, - // it can be rejected by the transport if it is invalid - s.crossDomain = true; - } - } - - // Convert data if not already a string - if ( s.data && s.processData && typeof s.data !== "string" ) { - s.data = jQuery.param( s.data, s.traditional ); - } - - // Apply prefilters - inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); - - // If request was aborted inside a prefilter, stop there - if ( completed ) { - return jqXHR; - } - - // We can fire global events as of now if asked to - // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) - fireGlobals = jQuery.event && s.global; - - // Watch for a new set of requests - if ( fireGlobals && jQuery.active++ === 0 ) { - jQuery.event.trigger( "ajaxStart" ); - } - - // 
Uppercase the type - s.type = s.type.toUpperCase(); - - // Determine if request has content - s.hasContent = !rnoContent.test( s.type ); - - // Save the URL in case we're toying with the If-Modified-Since - // and/or If-None-Match header later on - // Remove hash to simplify url manipulation - cacheURL = s.url.replace( rhash, "" ); - - // More options handling for requests with no content - if ( !s.hasContent ) { - - // Remember the hash so we can put it back - uncached = s.url.slice( cacheURL.length ); - - // If data is available, append data to url - if ( s.data ) { - cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; - - // #9682: remove data so that it's not used in an eventual retry - delete s.data; - } - - // Add or update anti-cache param if needed - if ( s.cache === false ) { - cacheURL = cacheURL.replace( rantiCache, "$1" ); - uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce++ ) + uncached; - } - - // Put hash and anti-cache on the URL that will be requested (gh-1732) - s.url = cacheURL + uncached; - - // Change '%20' to '+' if this is encoded form body content (gh-2658) - } else if ( s.data && s.processData && - ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { - s.data = s.data.replace( r20, "+" ); - } - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
- if ( s.ifModified ) { - if ( jQuery.lastModified[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); - } - if ( jQuery.etag[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); - } - } - - // Set the correct header, if data is being sent - if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { - jqXHR.setRequestHeader( "Content-Type", s.contentType ); - } - - // Set the Accepts header for the server, depending on the dataType - jqXHR.setRequestHeader( - "Accept", - s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? - s.accepts[ s.dataTypes[ 0 ] ] + - ( s.dataTypes[ 0 ] !== "*" ? ", " + allTypes + "; q=0.01" : "" ) : - s.accepts[ "*" ] - ); - - // Check for headers option - for ( i in s.headers ) { - jqXHR.setRequestHeader( i, s.headers[ i ] ); - } - - // Allow custom headers/mimetypes and early abort - if ( s.beforeSend && - ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { - - // Abort if not done already and return - return jqXHR.abort(); - } - - // Aborting is no longer a cancellation - strAbort = "abort"; - - // Install callbacks on deferreds - completeDeferred.add( s.complete ); - jqXHR.done( s.success ); - jqXHR.fail( s.error ); - - // Get transport - transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); - - // If no transport, we auto-abort - if ( !transport ) { - done( -1, "No Transport" ); - } else { - jqXHR.readyState = 1; - - // Send global event - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); - } - - // If request was aborted inside ajaxSend, stop there - if ( completed ) { - return jqXHR; - } - - // Timeout - if ( s.async && s.timeout > 0 ) { - timeoutTimer = window.setTimeout( function() { - jqXHR.abort( "timeout" ); - }, s.timeout ); - } - - try { - completed = false; - transport.send( requestHeaders, done ); - } catch ( e ) { - - // Rethrow post-completion 
exceptions - if ( completed ) { - throw e; - } - - // Propagate others as results - done( -1, e ); - } - } - - // Callback for when everything is done - function done( status, nativeStatusText, responses, headers ) { - var isSuccess, success, error, response, modified, - statusText = nativeStatusText; - - // Ignore repeat invocations - if ( completed ) { - return; - } - - completed = true; - - // Clear timeout if it exists - if ( timeoutTimer ) { - window.clearTimeout( timeoutTimer ); - } - - // Dereference transport for early garbage collection - // (no matter how long the jqXHR object will be used) - transport = undefined; - - // Cache response headers - responseHeadersString = headers || ""; - - // Set readyState - jqXHR.readyState = status > 0 ? 4 : 0; - - // Determine if successful - isSuccess = status >= 200 && status < 300 || status === 304; - - // Get response data - if ( responses ) { - response = ajaxHandleResponses( s, jqXHR, responses ); - } - - // Convert no matter what (that way responseXXX fields are always set) - response = ajaxConvert( s, response, jqXHR, isSuccess ); - - // If successful, handle type chaining - if ( isSuccess ) { - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
- if ( s.ifModified ) { - modified = jqXHR.getResponseHeader( "Last-Modified" ); - if ( modified ) { - jQuery.lastModified[ cacheURL ] = modified; - } - modified = jqXHR.getResponseHeader( "etag" ); - if ( modified ) { - jQuery.etag[ cacheURL ] = modified; - } - } - - // if no content - if ( status === 204 || s.type === "HEAD" ) { - statusText = "nocontent"; - - // if not modified - } else if ( status === 304 ) { - statusText = "notmodified"; - - // If we have data, let's convert it - } else { - statusText = response.state; - success = response.data; - error = response.error; - isSuccess = !error; - } - } else { - - // Extract error from statusText and normalize for non-aborts - error = statusText; - if ( status || !statusText ) { - statusText = "error"; - if ( status < 0 ) { - status = 0; - } - } - } - - // Set data for the fake xhr object - jqXHR.status = status; - jqXHR.statusText = ( nativeStatusText || statusText ) + ""; - - // Success/Error - if ( isSuccess ) { - deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); - } else { - deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); - } - - // Status-dependent callbacks - jqXHR.statusCode( statusCode ); - statusCode = undefined; - - if ( fireGlobals ) { - globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", - [ jqXHR, s, isSuccess ? 
success : error ] ); - } - - // Complete - completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); - - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); - - // Handle the global AJAX counter - if ( !( --jQuery.active ) ) { - jQuery.event.trigger( "ajaxStop" ); - } - } - } - - return jqXHR; - }, - - getJSON: function( url, data, callback ) { - return jQuery.get( url, data, callback, "json" ); - }, - - getScript: function( url, callback ) { - return jQuery.get( url, undefined, callback, "script" ); - } -} ); - -jQuery.each( [ "get", "post" ], function( i, method ) { - jQuery[ method ] = function( url, data, callback, type ) { - - // Shift arguments if data argument was omitted - if ( jQuery.isFunction( data ) ) { - type = type || callback; - callback = data; - data = undefined; - } - - // The url can be an options object (which then must have .url) - return jQuery.ajax( jQuery.extend( { - url: url, - type: method, - dataType: type, - data: data, - success: callback - }, jQuery.isPlainObject( url ) && url ) ); - }; -} ); - - -jQuery._evalUrl = function( url ) { - return jQuery.ajax( { - url: url, - - // Make this explicit, since user can override this through ajaxSetup (#11264) - type: "GET", - dataType: "script", - cache: true, - async: false, - global: false, - "throws": true - } ); -}; - - -jQuery.fn.extend( { - wrapAll: function( html ) { - var wrap; - - if ( this[ 0 ] ) { - if ( jQuery.isFunction( html ) ) { - html = html.call( this[ 0 ] ); - } - - // The elements to wrap the target around - wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); - - if ( this[ 0 ].parentNode ) { - wrap.insertBefore( this[ 0 ] ); - } - - wrap.map( function() { - var elem = this; - - while ( elem.firstElementChild ) { - elem = elem.firstElementChild; - } - - return elem; - } ).append( this ); - } - - return this; - }, - - wrapInner: function( html ) { - if ( jQuery.isFunction( html ) ) { - return this.each( function( i ) 
{ - jQuery( this ).wrapInner( html.call( this, i ) ); - } ); - } - - return this.each( function() { - var self = jQuery( this ), - contents = self.contents(); - - if ( contents.length ) { - contents.wrapAll( html ); - - } else { - self.append( html ); - } - } ); - }, - - wrap: function( html ) { - var isFunction = jQuery.isFunction( html ); - - return this.each( function( i ) { - jQuery( this ).wrapAll( isFunction ? html.call( this, i ) : html ); - } ); - }, - - unwrap: function( selector ) { - this.parent( selector ).not( "body" ).each( function() { - jQuery( this ).replaceWith( this.childNodes ); - } ); - return this; - } -} ); - - -jQuery.expr.pseudos.hidden = function( elem ) { - return !jQuery.expr.pseudos.visible( elem ); -}; -jQuery.expr.pseudos.visible = function( elem ) { - return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); -}; - - - - -jQuery.ajaxSettings.xhr = function() { - try { - return new window.XMLHttpRequest(); - } catch ( e ) {} -}; - -var xhrSuccessStatus = { - - // File protocol always yields status code 0, assume 200 - 0: 200, - - // Support: IE <=9 only - // #1450: sometimes IE returns 1223 when it should be 204 - 1223: 204 - }, - xhrSupported = jQuery.ajaxSettings.xhr(); - -support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); -support.ajax = xhrSupported = !!xhrSupported; - -jQuery.ajaxTransport( function( options ) { - var callback, errorCallback; - - // Cross domain only allowed if supported through XMLHttpRequest - if ( support.cors || xhrSupported && !options.crossDomain ) { - return { - send: function( headers, complete ) { - var i, - xhr = options.xhr(); - - xhr.open( - options.type, - options.url, - options.async, - options.username, - options.password - ); - - // Apply custom fields if provided - if ( options.xhrFields ) { - for ( i in options.xhrFields ) { - xhr[ i ] = options.xhrFields[ i ]; - } - } - - // Override mime type if needed - if ( options.mimeType && 
xhr.overrideMimeType ) { - xhr.overrideMimeType( options.mimeType ); - } - - // X-Requested-With header - // For cross-domain requests, seeing as conditions for a preflight are - // akin to a jigsaw puzzle, we simply never set it to be sure. - // (it can always be set on a per-request basis or even using ajaxSetup) - // For same-domain requests, won't change header if already provided. - if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { - headers[ "X-Requested-With" ] = "XMLHttpRequest"; - } - - // Set headers - for ( i in headers ) { - xhr.setRequestHeader( i, headers[ i ] ); - } - - // Callback - callback = function( type ) { - return function() { - if ( callback ) { - callback = errorCallback = xhr.onload = - xhr.onerror = xhr.onabort = xhr.onreadystatechange = null; - - if ( type === "abort" ) { - xhr.abort(); - } else if ( type === "error" ) { - - // Support: IE <=9 only - // On a manual native abort, IE9 throws - // errors on any property access that is not readyState - if ( typeof xhr.status !== "number" ) { - complete( 0, "error" ); - } else { - complete( - - // File: protocol always yields status 0; see #8605, #14207 - xhr.status, - xhr.statusText - ); - } - } else { - complete( - xhrSuccessStatus[ xhr.status ] || xhr.status, - xhr.statusText, - - // Support: IE <=9 only - // IE9 has no XHR2 but throws on binary (trac-11426) - // For XHR2 non-text, let the caller handle it (gh-2498) - ( xhr.responseType || "text" ) !== "text" || - typeof xhr.responseText !== "string" ? 
- { binary: xhr.response } : - { text: xhr.responseText }, - xhr.getAllResponseHeaders() - ); - } - } - }; - }; - - // Listen to events - xhr.onload = callback(); - errorCallback = xhr.onerror = callback( "error" ); - - // Support: IE 9 only - // Use onreadystatechange to replace onabort - // to handle uncaught aborts - if ( xhr.onabort !== undefined ) { - xhr.onabort = errorCallback; - } else { - xhr.onreadystatechange = function() { - - // Check readyState before timeout as it changes - if ( xhr.readyState === 4 ) { - - // Allow onerror to be called first, - // but that will not handle a native abort - // Also, save errorCallback to a variable - // as xhr.onerror cannot be accessed - window.setTimeout( function() { - if ( callback ) { - errorCallback(); - } - } ); - } - }; - } - - // Create the abort callback - callback = callback( "abort" ); - - try { - - // Do send the request (this may raise an exception) - xhr.send( options.hasContent && options.data || null ); - } catch ( e ) { - - // #14683: Only rethrow if this hasn't been notified as an error yet - if ( callback ) { - throw e; - } - } - }, - - abort: function() { - if ( callback ) { - callback(); - } - } - }; - } -} ); - - - - -// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) -jQuery.ajaxPrefilter( function( s ) { - if ( s.crossDomain ) { - s.contents.script = false; - } -} ); - -// Install script dataType -jQuery.ajaxSetup( { - accepts: { - script: "text/javascript, application/javascript, " + - "application/ecmascript, application/x-ecmascript" - }, - contents: { - script: /\b(?:java|ecma)script\b/ - }, - converters: { - "text script": function( text ) { - jQuery.globalEval( text ); - return text; - } - } -} ); - -// Handle cache's special case and crossDomain -jQuery.ajaxPrefilter( "script", function( s ) { - if ( s.cache === undefined ) { - s.cache = false; - } - if ( s.crossDomain ) { - s.type = "GET"; - } -} ); - -// Bind script tag hack transport 
-jQuery.ajaxTransport( "script", function( s ) { - - // This transport only deals with cross domain requests - if ( s.crossDomain ) { - var script, callback; - return { - send: function( _, complete ) { - script = jQuery( " + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/templates/list_source.html --- a/piecrust/admin/templates/list_source.html Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/templates/list_source.html Tue Nov 21 22:07:12 2017 -0800 @@ -14,7 +14,12 @@ {% for p in pages %} - + {% if p.timestamp > 0 %} + + {% else %} + no date/time + {% endif %} + {{p.title}} {{p.author}} {{p.category}} diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/templates/micropub.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/admin/templates/micropub.html Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,10 @@ +{% set title = 'Micropub Endpoint' %} + +{% extends 'layouts/master.html' %} + +{% block content %} +

This is PieCrust's Micropub endpoint.

+{% endblock %} + + + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/templates/publish.html --- a/piecrust/admin/templates/publish.html Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/templates/publish.html Tue Nov 21 22:07:12 2017 -0800 @@ -3,16 +3,29 @@ {% block content %}

Publish {{site_title}}

+{% with messages = get_flashed_messages() %} +{% for message in messages %} +

{{message}}

+{% endfor %} +{% endwith %} + {% for target in targets %}

{{target.name}}

- {% if target.description %}
{{target.description}}
{% endif %} + {% if target.description %}

{{target.description}}

{% endif %}
- +
{% endfor %} +{% if last_log %} +

Last Publish Log

+

+{{last_log}}
+
+{% endif %} + {% endblock %} diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/__init__.py --- a/piecrust/admin/views/__init__.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/__init__.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,4 +1,4 @@ -from flask import render_template +from flask import current_app, render_template, request from flask.views import View from .menu import get_menu_context @@ -25,4 +25,3 @@ context = {} context['menu'] = get_menu_context() return context - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/create.py --- a/piecrust/admin/views/create.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/create.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,12 +1,10 @@ -import os -import os.path import logging from flask import ( g, request, abort, render_template, url_for, redirect, flash) from flask.ext.login import login_required +from piecrust.page import Page from piecrust.sources.interfaces import IInteractiveSource -from piecrust.sources.base import MODE_CREATING -from piecrust.routing import create_route_metadata +from piecrust.uriutil import split_uri from ..blueprint import foodtruck_bp from ..views import with_menu_context @@ -17,8 +15,8 @@ @foodtruck_bp.route('/write/', methods=['GET', 'POST']) @login_required def write_page(source_name): - site = g.site.piecrust_app - source = site.getSource(source_name) + pcapp = g.site.piecrust_app + source = pcapp.getSource(source_name) if source is None: abort(400) if not isinstance(source, IInteractiveSource): @@ -26,52 +24,11 @@ if request.method == 'POST': if 'do_save' in request.form: - metadata = {} - for f in source.getInteractiveFields(): - metadata[f.name] = f.default_value - for fk, fv in request.form.items(): - if fk.startswith('meta-'): - metadata[fk[5:]] = fv - - logger.debug("Searching for page with metadata: %s" % metadata) - fac = source.findPageFactory(metadata, MODE_CREATING) - if fac is None: - logger.error("Can't find page for %s" % metadata) - abort(500) 
- - logger.debug("Creating page: %s" % fac.path) - os.makedirs(os.path.dirname(fac.path), exist_ok=True) - with open(fac.path, 'w', encoding='utf8') as fp: - fp.write('') - flash("%s was created." % os.path.relpath(fac.path, site.root_dir)) - - route = site.getSourceRoute(source.name, fac.metadata) - if route is None: - logger.error("Can't find route for page: %s" % fac.path) - abort(500) - - dummy = _DummyPage(fac) - route_metadata = create_route_metadata(dummy) - uri = route.getUri(route_metadata) - uri_root = '/site/%s/' % g.site.name - uri = uri[len(uri_root):] - logger.debug("Redirecting to: %s" % uri) - - return redirect(url_for('.edit_page', slug=uri)) - + return _submit_page_form(pcapp, source) abort(400) - return _write_page_form(source) -class _DummyPage: - def __init__(self, fac): - self.source_metadata = fac.metadata - - def getRouteMetadata(self): - return {} - - def _write_page_form(source): data = {} data['is_new_page'] = True @@ -88,3 +45,33 @@ with_menu_context(data) return render_template('create_page.html', **data) + +def _submit_page_form(pcapp, source): + metadata = {} + for f in source.getInteractiveFields(): + metadata[f.name] = f.default_value + for fk, fv in request.form.items(): + if fk.startswith('meta-'): + metadata[fk[5:]] = fv + + logger.debug("Creating item with metadata: %s" % metadata) + content_item = source.createContent(metadata) + if content_item is None: + logger.error("Can't create item for: %s" % metadata) + abort(500) + + logger.debug("Creating content: %s" % content_item.spec) + with source.openItem(content_item, 'w') as fp: + fp.write('---\n') + fp.write('draft: true\n') + fp.write('---\n') + fp.write('\n') + fp.write("Start writing!\n") + flash("'%s' was created." 
% content_item.spec) + + page = Page(source, content_item) + uri = page.getUri() + logger.debug("Redirecting to: %s" % uri) + _, rel_url = split_uri(page.app, uri) + return redirect(url_for('.edit_page', url=rel_url)) + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/dashboard.py --- a/piecrust/admin/views/dashboard.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/dashboard.py Tue Nov 21 22:07:12 2017 -0800 @@ -5,11 +5,11 @@ current_app, g, request, render_template, url_for, redirect) from flask.ext.login import login_user, logout_user, login_required -from piecrust.configuration import parse_config_header -from piecrust.rendering import QualifiedPage +from piecrust.sources.fs import FSContentSourceBase +from piecrust.sources.interfaces import IInteractiveSource from piecrust.uriutil import split_uri from ..textutil import text_preview -from ..blueprint import foodtruck_bp, load_user, after_this_request +from ..blueprint import foodtruck_bp, load_user from ..views import with_menu_context @@ -21,63 +21,63 @@ def index(): data = {} data['sources'] = [] - site = g.site + fs_endpoints = {} - for source in site.piecrust_app.sources: + + site = g.site + pcapp = site.piecrust_app + for source in pcapp.sources: if source.is_theme_source: continue - facs = source.getPageFactories() + if not isinstance(source, IInteractiveSource): + continue + src_data = { 'name': source.name, - 'list_url': url_for('.list_source', source_name=source.name), - 'page_count': len(facs)} + 'list_url': url_for('.list_source', source_name=source.name)} data['sources'].append(src_data) - fe = getattr(source, 'fs_endpoint', None) - if fe: - fs_endpoints[fe] = source + if isinstance(source, FSContentSourceBase): + fs_endpoints[source.fs_endpoint] = source data['new_pages'] = [] data['edited_pages'] = [] data['misc_files'] = [] if site.scm: st = site.scm.getStatus() + auto_formats = site.piecrust_app.config.get('site/auto_formats', + ['html']) for p in st.new_files: - pd = 
_getWipData(p, site, fs_endpoints) + pd = _getWipData(p, fs_endpoints, auto_formats, site.piecrust_app) if pd: data['new_pages'].append(pd) else: data['misc_files'].append(p) for p in st.edited_files: - pd = _getWipData(p, site, fs_endpoints) + pd = _getWipData(p, fs_endpoints, auto_formats, site.piecrust_app) if pd: data['edited_pages'].append(pd) else: data['misc_files'].append(p) - data['site_name'] = site.name - data['site_title'] = site.piecrust_app.config.get('site/title', site.name) + data['site_title'] = pcapp.config.get('site/title', "Unnamed Website") data['url_publish'] = url_for('.publish') - data['url_preview'] = url_for('.preview_site_root', sitename=site.name) + data['url_preview'] = url_for('.preview_root_page') + data['url_bake_assets'] = url_for('.rebake_assets') - data['sites'] = [] - for s in g.sites.getall(): - data['sites'].append({ - 'name': s.name, - 'display_name': s.piecrust_app.config.get('site/title'), - 'url': url_for('.index', site_name=s.name) - }) - data['needs_switch'] = len(g.config.get('sites')) > 1 - data['url_switch'] = url_for('.switch_site') + pub_tgts = pcapp.config.get('publish', {}) + data['publish'] = {'targets': list(pub_tgts.keys())} + + micropub = pcapp.config.get('micropub', {}) + data['publish'] = micropub.get('publish_target') with_menu_context(data) return render_template('dashboard.html', **data) -def _getWipData(path, site, fs_endpoints): - auto_formats = site.piecrust_app.config.get('site/auto_formats', ['html']) +def _getWipData(path, fs_endpoints, auto_formats, pcapp): pathname, pathext = os.path.splitext(path) - if pathext not in auto_formats: + if pathext.lstrip('.') not in auto_formats: return None source = None @@ -88,39 +88,33 @@ if source is None: return None - fac = source.buildPageFactory(os.path.join(site.root_dir, path)) - route = site.piecrust_app.getSourceRoute(source.name, fac.metadata) - if not route: + # TODO: this assumes FS sources, but this comes from the disk anyway. 
+ full_path = os.path.join(pcapp.root_dir, path) + content_item = source.findContentFromSpec(full_path) + if content_item is None: return None - qp = QualifiedPage(fac.buildPage(), route, fac.metadata) - uri = qp.getUri() - _, slug = split_uri(site.piecrust_app, uri) - - with open(fac.path, 'r', encoding='utf8') as fp: - raw_text = fp.read() + page = pcapp.getPage(source, content_item) + uri = page.getUri() + _, slug = split_uri(pcapp, uri) - header, offset = parse_config_header(raw_text) - extract = text_preview(raw_text, offset=offset) + seg = page.getSegment() + if not seg: + return None + + extract = text_preview(seg.content) return { - 'title': qp.config.get('title'), + 'title': page.config.get('title'), 'slug': slug, - 'url': url_for('.edit_page', slug=slug), + 'url': url_for('.edit_page', url=slug), 'text': extract } +@foodtruck_bp.route('/rebake_assets', methods=['POST']) @login_required -@foodtruck_bp.route('/switch_site', methods=['POST']) -def switch_site(): - site_name = request.form.get('site_name') - if not site_name: - return redirect(url_for('.index')) - - @after_this_request - def _save_site(resp): - resp.set_cookie('foodtruck_site_name', site_name) - +def rebake_assets(): + g.site.rebakeAssets() return redirect(url_for('.index')) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/edit.py --- a/piecrust/admin/views/edit.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/edit.py Tue Nov 21 22:07:12 2017 -0800 @@ -13,15 +13,14 @@ logger = logging.getLogger(__name__) -@foodtruck_bp.route('/edit/', defaults={'slug': ''}, methods=['GET', 'POST']) -@foodtruck_bp.route('/edit/', methods=['GET', 'POST']) +@foodtruck_bp.route('/edit/', defaults={'url': ''}, methods=['GET', 'POST']) +@foodtruck_bp.route('/edit/', methods=['GET', 'POST']) @login_required -def edit_page(slug): +def edit_page(url): site = g.site site_app = site.piecrust_app - rp = get_requested_page(site_app, - '/site/%s/%s' % (g.sites.current_site, slug)) - page = 
rp.qualified_page + rp = get_requested_page(site_app, site.make_url('/preview/%s' % url)) + page = rp.page if page is None: abort(404) @@ -30,43 +29,47 @@ if request.form['is_dos_nl'] == '0': page_text = page_text.replace('\r\n', '\n') + page_spec = page.content_spec + if 'do_save' in request.form or 'do_save_and_commit' in request.form: - logger.debug("Writing page: %s" % page.path) - with open(page.path, 'w', encoding='utf8', newline='') as fp: + logger.debug("Writing page: %s" % page_spec) + with page.source.openItem(page.content_item, 'w', + encoding='utf8', newline='') as fp: fp.write(page_text) - flash("%s was saved." % os.path.relpath( - page.path, site_app.root_dir)) + flash("%s was saved." % page_spec) if 'do_save_and_commit' in request.form: message = request.form.get('commit_msg') if not message: - message = "Edit %s" % os.path.relpath( - page.path, site_app.root_dir) + message = "Edit %s" % page_spec if site.scm: - site.scm.commit([page.path], message) + commit_paths = [page_spec] + assets_dir = os.path.splitext(page_spec)[0] + '-assets' + if os.path.isdir(assets_dir): + commit_paths += list(os.listdir(assets_dir)) + site.scm.commit(commit_paths, message) if 'do_save' in request.form or 'do_save_and_commit' in request.form: - return _edit_page_form(page, slug, site.name) + return _edit_page_form(page, url) abort(400) - return _edit_page_form(page, slug, site.name) + return _edit_page_form(page, url) -@foodtruck_bp.route('/upload/', methods=['POST']) -def upload_page_asset(slug): +@foodtruck_bp.route('/upload/', methods=['POST']) +def upload_page_asset(url): if 'ft-asset-file' not in request.files: - return redirect(url_for('.edit_page', slug=slug)) + return redirect(url_for('.edit_page', url=url)) asset_file = request.files['ft-asset-file'] if asset_file.filename == '': - return redirect(url_for('.edit_page', slug=slug)) + return redirect(url_for('.edit_page', url=url)) site = g.site site_app = site.piecrust_app - rp = get_requested_page(site_app, - 
'/site/%s/%s' % (g.sites.current_site, slug)) - page = rp.qualified_page + rp = get_requested_page(site_app, site.make_url('/preview/%s' % url)) + page = rp.page if page is None: abort(404) @@ -83,26 +86,27 @@ asset_path = os.path.join(dirname, filename) logger.info("Uploading file to: %s" % asset_path) asset_file.save(asset_path) - return redirect(url_for('.edit_page', slug=slug)) + return redirect(url_for('.edit_page', url=url)) -def _edit_page_form(page, slug, sitename): +def _edit_page_form(page, url): data = {} data['is_new_page'] = False - data['url_postback'] = url_for('.edit_page', slug=slug) - data['url_upload_asset'] = url_for('.upload_page_asset', slug=slug) - data['url_preview'] = page.getUri() + data['url_postback'] = url_for('.edit_page', url=url) + data['url_upload_asset'] = url_for('.upload_page_asset', url=url) + data['url_preview'] = url_for('.preview_page', url=url) data['url_cancel'] = url_for( '.list_source', source_name=page.source.name) - with open(page.path, 'r', encoding='utf8', newline='') as fp: + with page.source.openItem(page.content_item, 'r', + encoding='utf8', newline='') as fp: data['page_text'] = fp.read() data['is_dos_nl'] = "1" if '\r\n' in data['page_text'] else "0" page.app.env.base_asset_url_format = \ page.app.config.get('site/root') + '_asset/%path%' - assetor = Assetor(page, 'blah') + assetor = Assetor(page) assets_data = [] - for n in assetor.allNames(): + for n in assetor._getAssetNames(): assets_data.append({'name': n, 'url': assetor[n]}) data['assets'] = assets_data diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/menu.py --- a/piecrust/admin/views/menu.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/menu.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,31 +1,39 @@ from flask import g, request, url_for from flask.ext.login import current_user +from piecrust.sources.interfaces import IInteractiveSource def get_menu_context(): entries = [] entries.append({ - 'url': '/', + 'url': url_for('FoodTruck.index'), 
'title': "Dashboard", 'icon': 'speedometer'}) site = g.site.piecrust_app - for s in site.sources: - if s.is_theme_source: + for source in site.sources: + if source.is_theme_source: + continue + if not isinstance(source, IInteractiveSource): continue - source_icon = s.config.get('admin_icon', 'document') - if s.name == 'pages': - source_icon = 'document-text' - elif 'blog' in s.name: - source_icon = 'filing' + # Figure out the icon to use... we do some hard-coded stuff to + # have something vaguely pretty out of the box. + source_icon = source.config.get('admin_icon') + if source_icon is None: + if source.name == 'pages': + source_icon = 'document-text' + elif 'blog' in source.name or 'posts' in source.name: + source_icon = 'filing' + else: + source_icon = 'document' - url_write = url_for('.write_page', source_name=s.name) - url_listall = url_for('.list_source', source_name=s.name) + url_write = url_for('.write_page', source_name=source.name) + url_listall = url_for('.list_source', source_name=source.name) ctx = { 'url': url_listall, - 'title': s.name, + 'title': source.name, 'icon': source_icon, 'quicklink': { 'icon': 'plus-round', @@ -44,6 +52,7 @@ 'title': "Publish", 'icon': 'upload'}) + # TODO: re-enable settings UI at some point. 
# entries.append({ # 'url': url_for('.settings'), # 'title': "Settings", diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/micropub.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/admin/views/micropub.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,356 @@ +import re +import os +import os.path +import json +import uuid +import logging +import datetime +import yaml +from werkzeug.utils import secure_filename +from flask import g, url_for, request, abort, jsonify, Response +from flask_indieauth import requires_indieauth +from ..blueprint import foodtruck_bp +from piecrust import CACHE_DIR +from piecrust.configuration import merge_dicts +from piecrust.page import Page + + +logger = logging.getLogger(__name__) + +re_unsafe_asset_char = re.compile('[^a-zA-Z0-9_]') + + +def _patch_flask_indieauth(): + import flask_indieauth + + def _patched_get_access_token_from_json_request(request): + try: + jsondata = json.loads(request.get_data(as_text=True)) + return jsondata['access_token'] + except ValueError: + return None + + flask_indieauth.get_access_token_from_json_request = \ + _patched_get_access_token_from_json_request + logger.info("Patched Flask-IndieAuth.") + + +_patch_flask_indieauth() + + +_enable_debug_req = False + + +def _debug_req(): + if _enable_debug_req: + logger.warning("Headers: %s" % request.headers) + logger.warning("Args: %s" % request.args) + logger.warning("Form: %s" % request.form) + logger.warning("Data: %s" % request.get_data(True)) + try: + logger.warning("JSON: %s" % request.json) + except: + pass + + +@foodtruck_bp.route('/micropub', methods=['POST']) +@requires_indieauth +def post_micropub(): + _debug_req() + + if 'h' in request.form: + data = _get_mf2_from_form(request.form) + else: + try: + data = json.loads(request.get_data(as_text=True)) + except: + data = None + + if data: + entry_type = _mf2get(data, 'type') + if entry_type == 'h-entry': + source_name, content_item = _create_hentry(data['properties']) + _run_publisher() + 
return _get_location_response(source_name, content_item) + + else: + logger.error("Post type '%s' is not supported." % post_type) + else: + logger.error("Missing form or JSON data.") + + abort(400) + + +@foodtruck_bp.route('/micropub/media', methods=['POST']) +@requires_indieauth +def post_micropub_media(): + _debug_req() + + photo = request.files.get('file') + if not photo: + logger.error("Micropub media request without a file part.") + abort(400) + return + + fn = secure_filename(photo.filename) + fn = re_unsafe_asset_char.sub('_', fn) + fn = '%s_%s' % (str(uuid.uuid1()), fn) + + photo_cache_dir = os.path.join( + g.site.root_dir, + CACHE_DIR, g.site.piecrust_factory.cache_key, + 'uploads') + try: + os.makedirs(photo_cache_dir, mode=0o775, exist_ok=True) + except OSError: + pass + + photo_path = os.path.join(photo_cache_dir, fn) + logger.info("Uploading file to: %s" % photo_path) + photo.save(photo_path) + + r = Response() + r.status_code = 201 + r.headers.add('Location', fn) + return r + + +@foodtruck_bp.route('/micropub', methods=['GET']) +def get_micropub(): + data = {} + if request.args.get('q') == 'config': + endpoint_url = (request.host_url.rstrip('/') + + url_for('.post_micropub_media')) + data.update({ + "media-endpoint": endpoint_url + }) + + pcapp = g.site.piecrust_app + syn_data = pcapp.config.get('micropub/syndicate_to') + if syn_data: + data['syndicate-to'] = syn_data + + return jsonify(**data) + + +def _run_publisher(): + pcapp = g.site.piecrust_app + target = pcapp.config.get('micropub/publish_target') + if target: + logger.debug("Running pushing target '%s'." 
% target) + g.site.publish(target) + + +def _get_location_response(source_name, content_item): + from piecrust.app import PieCrust + pcapp = PieCrust(g.site.root_dir) + source = pcapp.getSource(source_name) + + page = Page(source, content_item) + uri = page.getUri() + + logger.debug("Redirecting to: %s" % uri) + r = Response() + r.status_code = 201 + r.headers.add('Location', uri) + return r + + +re_array_prop = re.compile(r'\[(?P\w*)\]$') + + +def _get_mf2_from_form(f): + post_type = 'h-' + f.get('h', '') + + properties = {} + for key, vals in f.lists(): + m = re_array_prop.search(key) + if not m: + properties[key] = vals + continue + + key_name_only = key[:m.start()] + inner_name = m.group('name') + if not inner_name: + properties[key_name_only] = vals + continue + + properties[key_name_only] = [{inner_name: vals[0]}] + + return { + 'type': [post_type], + 'properties': properties} + + +def _mf2get(data, key): + val = data.get(key) + if val is not None: + return val[0] + return None + + +def _create_hentry(data): + name = _mf2get(data, 'name') + summary = _mf2get(data, 'summary') + location = _mf2get(data, 'location') + reply_to = _mf2get(data, 'in-reply-to') + status = _mf2get(data, 'post-status') + # pubdate = _mf2get(data, 'published') or 'now' + + categories = data.get('category') + + # Get the content. + post_format = None + content = _mf2get(data, 'content') + if isinstance(content, dict): + content = content.get('html') + post_format = 'none' + if not content: + logger.error("No content specified!") + logger.error(data) + abort(400) + + # Clean-up stuff. + # TODO: setting to conserve Windows-type line endings? + content = content.replace('\r\n', '\n') + if summary: + summary = summary.replace('\r\n', '\n') + + # Get the slug. + slug = _mf2get(data, 'slug') or _mf2get(data, 'mp-slug') + now = datetime.datetime.now() + if not slug: + slug = '%02d%02d%02d' % (now.hour, now.minute, now.second) + + # Create the post in the correct content source. 
+ # Note that this won't actually write anything to disk yet, we're + # just creating it in memory. + pcapp = g.site.piecrust_app + source_name = pcapp.config.get('micropub/source', 'posts') + source = pcapp.getSource(source_name) + + metadata = { + 'date': now, + 'slug': slug + } + logger.debug("Creating item with metadata: %s" % metadata) + content_item = source.createContent(metadata) + if content_item is None: + logger.error("Can't create item for: %s" % metadata) + abort(500) + + # Get the media to attach to the post. + photos = None + if 'photo' in request.files: + photos = [request.files['photo']] + elif 'photo[]' in request.files: + photos = request.files.getlist('photo[]') + photo_urls = data.get('photo') + + # Create the assets folder if we have anything to put there. + # TODO: add proper APIs for creating related assets. + if photo_urls or photos: + photo_dir, _ = os.path.splitext(content_item.spec) + photo_dir += '-assets' + try: + os.makedirs(photo_dir, mode=0o775, exist_ok=True) + except OSError: + # An `OSError` can still be raised in older versions of Python + # if the permissions don't match an existing folder. + # Let's ignore it. + pass + + # Photo URLs come from files uploaded via the media endpoint... + # They're waiting for us in the upload cache folder, so let's + # move them to the post's assets folder. + photo_names = [] + if photo_urls: + photo_cache_dir = os.path.join( + g.site.root_dir, + CACHE_DIR, g.site.piecrust_factory.cache_key, + 'uploads') + + for p_url in photo_urls: + _, __, p_url = p_url.rpartition('/') + p_path = os.path.join(photo_cache_dir, p_url) + p_uuid, p_fn = p_url.split('_', 1) + p_asset = os.path.join(photo_dir, p_fn) + logger.info("Moving upload '%s' to '%s'." % (p_path, p_asset)) + try: + os.rename(p_path, p_asset) + except OSError: + logger.error("Can't move '%s' to '%s'." 
% (p_path, p_asset)) + raise + + p_fn_no_ext, _ = os.path.splitext(p_fn) + photo_names.append(p_fn_no_ext) + + # There could also be some files uploaded along with the post + # so upload them right now. + if photos: + for photo in photos: + if not photo or not photo.filename: + logger.warning("Got empty photo in request files... skipping.") + continue + + fn = secure_filename(photo.filename) + fn = re_unsafe_asset_char.sub('_', fn) + photo_path = os.path.join(photo_dir, fn) + logger.info("Uploading file to: %s" % photo_path) + photo.save(photo_path) + + fn_no_ext, _ = os.path.splitext(fn) + photo_names.append(fn_no_ext) + + # Build the config. + post_config = {} + if name: + post_config['title'] = name + if categories: + post_config['tags'] = categories + if location: + post_config['location'] = location + if reply_to: + post_config['reply_to'] = reply_to + if status and status != 'published': + post_config['draft'] = True + if post_format: + post_config['format'] = post_format + post_config['time'] = '%02d:%02d:%02d' % (now.hour, now.minute, now.second) + + # If there's no title, this is a "microblogging" post. 
+ if not name: + micro_config = pcapp.config.get('micropub/microblogging') + if micro_config: + merge_dicts(post_config, micro_config) + + logger.debug("Writing to item: %s" % content_item.spec) + with source.openItem(content_item, mode='w', encoding='utf8') as fp: + fp.write('---\n') + yaml.dump(post_config, fp, + default_flow_style=False, + allow_unicode=True) + fp.write('---\n') + + if summary: + fp.write(summary) + fp.write('\n') + fp.write('\n\n') + fp.write(content) + + if photo_names: + fp.write('\n\n') + for pn in photo_names: + fp.write('%s\n\n' % + (pn, pn)) + + if os.supports_fd: + import stat + try: + os.chmod(fp.fileno(), + stat.S_IRUSR|stat.S_IWUSR|stat.S_IRGRP|stat.S_IWGRP) + except OSError: + pass + + return source_name, content_item + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/preview.py --- a/piecrust/admin/views/preview.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/preview.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,25 +1,30 @@ -from flask import current_app, g, make_response +from flask import g, request, make_response from flask.ext.login import login_required -from piecrust.app import PieCrustFactory -from piecrust.serving.server import Server +from piecrust.serving.server import PieCrustServer from ..blueprint import foodtruck_bp -@foodtruck_bp.route('/site//') +@foodtruck_bp.route('/preview/') @login_required -def preview_site_root(sitename): - return preview_site(sitename, '/') +def preview_root_page(): + return preview_page('/') -@foodtruck_bp.route('/site//') +@foodtruck_bp.route('/preview/') @login_required -def preview_site(sitename, url): - root_dir = g.sites.get_root_dir(sitename) - appfactory = PieCrustFactory( - root_dir, - cache_key='foodtruck', - debug=current_app.debug) - server = Server(appfactory, - root_url='/site/%s/' % sitename) - return make_response(server._run_request) +def preview_page(url): + site = g.site + pcappfac = site.piecrust_factory + root_url = request.script_root or '' + root_url += 
site.make_url('/preview/') + server = PieCrustServer(pcappfac, root_url=root_url) + # Patch the WSGI environment for the underlying PieCrust server, + # because it doesn't generally handle stuff being under a different + # sub folder of the domain. + script_name = request.environ['SCRIPT_NAME'] + request.environ['SCRIPT_NAME'] = '' + request.environ['PATH_INFO'] = script_name + request.environ['PATH_INFO'] + + return make_response(server) + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/publish.py --- a/piecrust/admin/views/publish.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/publish.py Tue Nov 21 22:07:12 2017 -0800 @@ -23,13 +23,20 @@ site = g.site pub_cfg = copy.deepcopy(site.piecrust_app.config.get('publish', {})) if not pub_cfg: - data = {'error': "There are not publish targets defined in your " + data = {'error': "There are no publish targets defined in your " "configuration file."} return render_template('error.html', **data) + try: + with open(site.publish_log_file, 'r') as fp: + last_pub_log = fp.read() + except OSError: + last_pub_log = None + data = {} data['url_run'] = url_for('.publish') - data['site_title'] = site.piecrust_app.config.get('site/title', site.name) + data['site_title'] = site.piecrust_app.config.get('site/title', + "Unnamed Website") data['targets'] = [] for tn in sorted(pub_cfg.keys()): tc = pub_cfg[tn] @@ -41,6 +48,8 @@ 'description': desc }) + data['last_log'] = last_pub_log + with_menu_context(data) return render_template('publish.html', **data) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/views/sources.py --- a/piecrust/admin/views/sources.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/views/sources.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,3 +1,4 @@ +import re from flask import g, abort, render_template, url_for from flask.ext.login import login_required from piecrust.data.paginator import Paginator @@ -18,17 +19,16 @@ default_author = site.config.get('site/author') data = {'title': "List %s" 
% source_name} data['pages'] = [] - pgn = Paginator(None, source, page_num=page_num, items_per_page=20) + pgn = Paginator(source, None, sub_num=page_num, items_per_page=20) for p in pgn.items: page_data = { - 'title': p['title'], - 'author': p.get('author', default_author), - 'slug': p['slug'], - 'timestamp': p['timestamp'], + 'title': p.get('title') or _get_first_line_title(p), + 'author': p.get('author') or default_author, + 'timestamp': p.get('timestamp'), 'tags': p.get('tags', []), 'category': p.get('category'), 'source': source_name, - 'url': url_for('.edit_page', slug=p['slug']) + 'url': url_for('.edit_page', url=p['rel_url']) } data['pages'].append(page_data) @@ -59,3 +59,25 @@ with_menu_context(data) return render_template('list_source.html', **data) + +re_first_line_title = re.compile(r'[\n\r\.\!\?;]') + + +def _get_first_line_title(pagedata): + content = pagedata.get('raw_content') or '' + content = content.content.strip() + if not content: + return '' + + m = re_first_line_title.search(content, 1) + if m: + content = content[:m.start()] + + words = content.split(' ') + title = words[0] + cur_word_idx = 1 + while len(title) < 60 and cur_word_idx < len(words): + title += ' ' + words[cur_word_idx] + cur_word_idx += 1 + + return content diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/web.py --- a/piecrust/admin/web.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/admin/web.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,69 +1,68 @@ import os.path import logging -from flask import Flask, render_template -from werkzeug import SharedDataMiddleware -from .blueprint import foodtruck_bp -from .configuration import FoodTruckConfigNotFoundError -from .sites import InvalidSiteError +from flask import Flask logger = logging.getLogger(__name__) -def create_foodtruck_app(extra_settings=None): +def create_foodtruck_app(extra_settings=None, url_prefix=None): + from .blueprint import foodtruck_bp + app = Flask(__name__.split('.')[0], static_folder=None) 
app.config.from_object('piecrust.admin.settings') - app.config.from_envvar('FOODTRUCK_SETTINGS', silent=True) if extra_settings: app.config.update(extra_settings) - admin_root = app.config.setdefault('FOODTRUCK_ROOT', os.getcwd()) - config_path = os.path.join(admin_root, 'app.cfg') + root_dir = app.config.setdefault('FOODTRUCK_ROOT_DIR', os.getcwd()) - # If we're being run as the `chef admin run` command, from inside a - # PieCrust website, do a few things differently. - app.config['FOODTRUCK_PROCEDURAL_CONFIG'] = None - if (app.config.get('FOODTRUCK_CMDLINE_MODE', False) and - os.path.isfile(os.path.join(admin_root, 'config.yml'))): - app.secret_key = os.urandom(22) - app.config['LOGIN_DISABLED'] = True - app.config['FOODTRUCK_PROCEDURAL_CONFIG'] = { - 'sites': { - 'local': admin_root} - } + app.config.from_pyfile(os.path.join(root_dir, 'admin_app.cfg'), + silent=True) + app.config.from_envvar('FOODTRUCK_SETTINGS', silent=True) - # Add a special route for the `.well-known` directory. - app.wsgi_app = SharedDataMiddleware( - app.wsgi_app, - {'/.well-known': os.path.join(admin_root, '.well-known')}) - - if os.path.isfile(config_path): - app.config.from_pyfile(config_path) - + # Setup logging/error handling. if app.config['DEBUG']: l = logging.getLogger() l.setLevel(logging.DEBUG) - else: - @app.errorhandler(FoodTruckConfigNotFoundError) - def _on_config_missing(ex): - return render_template('install.html') - @app.errorhandler(InvalidSiteError) - def _on_invalid_site(ex): - data = {'error': - "The was an error with your configuration file: %s" % - str(ex)} - return render_template('error.html', **data) - - if not app.secret_key: + if not app.config['SECRET_KEY']: # If there's no secret key, create a temp one but mark the app as not # correctly installed so it shows the installation information page. - app.secret_key = 'temp-key' + app.config['SECRET_KEY'] = 'temp-key' # Register extensions and blueprints. 
- app.register_blueprint(foodtruck_bp) + app.register_blueprint(foodtruck_bp, url_prefix=url_prefix) - logger.debug("Created FoodTruck app with admin root: %s" % admin_root) + # Debugging stuff + if app.config.get('FOODTRUCK_DEBUG_404'): + @app.errorhandler(404) + def page_not_found(e): + return _debug_page_not_found(app, e) + + logger.debug("Created FoodTruck app with admin root: %s" % root_dir) return app + +def _debug_page_not_found(app, e): + from flask import request, url_for + output = [] + for rule in app.url_map.iter_rules(): + options = {} + for arg in rule.arguments: + options[arg] = "[{0}]".format(arg) + methods = ','.join(rule.methods) + try: + url = url_for(rule.endpoint, **options) + except: + url = '???' + line = ("{:50s} {:20s} {}".format(rule.endpoint, methods, url)) + output.append(line) + + resp = 'FOODTRUCK_ROOT_URL=%s
\n' % str( + app.config['FOODTRUCK_ROOT_URL']) + resp += 'PATH=%s
\n' % request.path + resp += 'ENVIRON=%s
\n' % str(request.environ) + resp += 'URL RULES:
\n' + resp += '
\n'.join(sorted(output)) + return resp, 404 diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/admin/wsgiutil.py --- a/piecrust/admin/wsgiutil.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,24 +0,0 @@ -import logging - - -logger = logging.getLogger() - - -def get_wsgi_app(admin_root=None, log_file=None, - max_log_bytes=4096, log_backup_count=0, - log_level=logging.INFO): - if log_file: - from logging.handlers import RotatingFileHandler - handler = RotatingFileHandler(log_file, maxBytes=max_log_bytes, - backupCount=log_backup_count) - handler.setLevel(log_level) - logging.getLogger().addHandler(handler) - - logger.debug("Creating WSGI application.") - es = {} - if admin_root: - es['FOODTRUCK_ROOT'] = admin_root - from .web import create_foodtruck_app - app = create_foodtruck_app(es) - return app - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/app.py --- a/piecrust/app.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/app.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,18 +1,18 @@ import time import os.path -import hashlib import logging import urllib.parse from werkzeug.utils import cached_property from piecrust import ( - RESOURCES_DIR, - CACHE_DIR, TEMPLATES_DIR, ASSETS_DIR, - THEME_DIR, PLUGINS_DIR, - CONFIG_PATH, THEME_CONFIG_PATH) + RESOURCES_DIR, + CACHE_DIR, TEMPLATES_DIR, ASSETS_DIR, + THEME_DIR, PLUGINS_DIR, + CONFIG_PATH, THEME_CONFIG_PATH) from piecrust.appconfig import PieCrustConfiguration from piecrust.cache import ExtensibleCache, NullExtensibleCache -from piecrust.configuration import ConfigurationError, merge_dicts +from piecrust.configuration import ConfigurationError from piecrust.environment import StandardEnvironment +from piecrust.page import Page from piecrust.plugins.base import PluginLoader from piecrust.routing import Route from piecrust.sources.base import REALM_THEME @@ -36,18 +36,24 @@ else: self.cache = NullExtensibleCache() + if env is None: + env = StandardEnvironment() self.env = env - if self.env is None: - 
self.env = StandardEnvironment() - self.env.initialize(self) - self.env.registerTimer('SiteConfigLoad') - self.env.registerTimer('PageLoad') - self.env.registerTimer("PageDataBuild") - self.env.registerTimer("BuildRenderData") - self.env.registerTimer("PageRender") - self.env.registerTimer("PageRenderSegments") - self.env.registerTimer("PageRenderLayout") - self.env.registerTimer("PageSerialize") + env.initialize(self) + + stats = env.stats + stats.registerTimer('SiteConfigLoad') + stats.registerTimer('PageLoad') + stats.registerTimer("BuildRenderData") + stats.registerTimer("BuildLazyPageData") + stats.registerTimer("PageRender") + stats.registerTimer("PageRenderSegments") + stats.registerTimer("PageRenderLayout") + stats.registerTimer("PageSerialize") + stats.registerTimer("MergedMapping_get") + stats.registerCounter('PageLoads') + stats.registerCounter('PageRenderSegments') + stats.registerCounter('PageRenderLayout') @cached_property def config(self): @@ -65,25 +71,26 @@ config_cache = self.cache.getCache('app') config = PieCrustConfiguration( - path=path, theme_path=theme_path, - cache=config_cache, theme_config=self.theme_site) + path=path, theme_path=theme_path, + cache=config_cache, theme_config=self.theme_site) local_path = os.path.join( - self.root_dir, 'configs', 'local.yml') + self.root_dir, 'configs', 'local.yml') config.addVariant(local_path, raise_if_not_found=False) if self.theme_site: variant_path = os.path.join( - self.root_dir, 'configs', 'theme_preview.yml') + self.root_dir, 'configs', 'theme_preview.yml') config.addVariant(variant_path, raise_if_not_found=False) - self.env.stepTimer('SiteConfigLoad', time.perf_counter() - start_time) + self.env.stats.stepTimer('SiteConfigLoad', + time.perf_counter() - start_time) return config @cached_property def assets_dirs(self): assets_dirs = self._get_configurable_dirs( - ASSETS_DIR, 'site/assets_dirs') + ASSETS_DIR, 'site/assets_dirs') # Also add the theme directory, if any. 
if self.theme_dir: @@ -96,7 +103,7 @@ @cached_property def templates_dirs(self): templates_dirs = self._get_configurable_dirs( - TEMPLATES_DIR, 'site/templates_dirs') + TEMPLATES_DIR, 'site/templates_dirs') # Also, add the theme directory, if any. if self.theme_dir: @@ -149,6 +156,7 @@ s['type']) src = cls(self, n, s) sources.append(src) + return sources @cached_property @@ -157,25 +165,10 @@ for r in self.config.get('site/routes'): rte = Route(self, r) routes.append(rte) + routes = sorted(routes, key=lambda r: r.pass_num) return routes @cached_property - def generators(self): - defs = {} - for cls in self.plugin_loader.getPageGenerators(): - defs[cls.GENERATOR_NAME] = cls - - gens = [] - for n, g in self.config.get('site/generators').items(): - cls = defs.get(g['type']) - if cls is None: - raise ConfigurationError("No such page generator type: %s" % - g['type']) - gen = cls(self, n, g) - gens.append(gen) - return gens - - @cached_property def publishers(self): defs_by_name = {} defs_by_scheme = {} @@ -188,20 +181,25 @@ publish_config = self.config.get('publish') if publish_config is None: return tgts + for n, t in publish_config.items(): - pub_type = None - is_scheme = False + pub_class = None if isinstance(t, dict): pub_type = t.get('type') + pub_class = defs_by_name[pub_type] + pub_cfg = t elif isinstance(t, str): comps = urllib.parse.urlparse(t) pub_type = comps.scheme - is_scheme = True - cls = (defs_by_scheme.get(pub_type) if is_scheme - else defs_by_name.get(pub_type)) - if cls is None: + pub_class = defs_by_scheme[pub_type] + pub_cfg = None + if pub_class is None: raise ConfigurationError("No such publisher: %s" % pub_type) - tgt = cls(self, n, t) + + tgt = pub_class(self, n, pub_cfg) + if pub_cfg is None: + tgt.parseUrlTarget(comps) + tgts.append(tgt) return tgts @@ -209,31 +207,17 @@ for source in self.sources: if source.name == source_name: return source - return None - def getGenerator(self, generator_name): - for gen in self.generators: - if gen.name 
== generator_name: - return gen - return None + from piecrust.sources.base import SourceNotFoundError + raise SourceNotFoundError(source_name) - def getSourceRoutes(self, source_name): + def getSourceRoute(self, source_name): for route in self.routes: if route.source_name == source_name: - yield route + return route - def getSourceRoute(self, source_name, route_metadata): - for route in self.getSourceRoutes(source_name): - if (route_metadata is None or - route.matchesMetadata(route_metadata)): - return route - return None - - def getGeneratorRoute(self, generator_name): - for route in self.routes: - if route.generator_name == generator_name: - return route - return None + from piecrust.routing import RouteNotFoundError + raise RouteNotFoundError(source_name) def getPublisher(self, target_name): for pub in self.publishers: @@ -241,6 +225,12 @@ return pub return None + def getPage(self, source, content_item): + cache_key = '%s@%s' % (source.name, content_item.spec) + return self.env.page_repository.get( + cache_key, + lambda: Page(source, content_item)) + def resolvePath(self, path): path = multi_replace(path, {'%theme_dir%': self.theme_dir}) return os.path.join(self.root_dir, path) @@ -264,41 +254,45 @@ return dirs -def apply_variant_and_values(app, config_variant=None, config_values=None): - if config_variant is not None: - logger.debug("Adding configuration variant '%s'." % config_variant) - variant_path = os.path.join( - app.root_dir, 'configs', '%s.yml' % config_variant) - app.config.addVariant(variant_path) +def apply_variants_and_values(app, config_variants=None, config_values=None): + if config_variants is not None: + for value in config_variants: + logger.debug("Adding configuration variant '%s'." 
% value) + variant_path = os.path.join( + app.root_dir, 'configs', '%s.yml' % value) + app.config.addVariant(variant_path) if config_values is not None: for name, value in config_values: - logger.debug("Adding configuration override '%s': %s" % (name, value)) + logger.debug("Adding configuration override '%s': %s" % + (name, value)) app.config.addVariantValue(name, value) class PieCrustFactory(object): + """ A class that builds a PieCrust app instance. + """ def __init__( self, root_dir, *, cache=True, cache_key=None, - config_variant=None, config_values=None, + config_variants=None, config_values=None, debug=False, theme_site=False): self.root_dir = root_dir self.cache = cache self.cache_key = cache_key - self.config_variant = config_variant + self.config_variants = config_variants self.config_values = config_values self.debug = debug self.theme_site = theme_site def create(self): app = PieCrust( - self.root_dir, - cache=self.cache, - cache_key=self.cache_key, - debug=self.debug, - theme_site=self.theme_site) - apply_variant_and_values( - app, self.config_variant, self.config_values) + self.root_dir, + cache=self.cache, + cache_key=self.cache_key, + debug=self.debug, + theme_site=self.theme_site) + apply_variants_and_values( + app, self.config_variants, self.config_values) return app diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/appconfig.py --- a/piecrust/appconfig.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/appconfig.py Tue Nov 21 22:07:12 2017 -0800 @@ -16,10 +16,8 @@ from piecrust.cache import NullCache from piecrust.configuration import ( Configuration, ConfigurationError, ConfigurationLoader, - try_get_dict_values, try_get_dict_value, set_dict_value, - merge_dicts, visit_dict, - MERGE_NEW_VALUES, MERGE_OVERWRITE_VALUES, MERGE_PREPEND_LISTS, - MERGE_APPEND_LISTS) + try_get_dict_values, set_dict_value, + merge_dicts, visit_dict) from piecrust.sources.base import REALM_USER, REALM_THEME @@ -47,6 +45,7 @@ self._path = path self._theme_path = theme_path 
self._cache = cache or NullCache() + self._cache_hash_mod = '' self._custom_paths = [] self._post_fixups = [] self.theme_config = theme_config @@ -76,6 +75,7 @@ set_dict_value(config, path, value) self._post_fixups.append(_fixup) + self._cache_hash_mod += '&val[%s=%s]' % (path, repr(value)) def setAll(self, values, validate=False): # Override base class implementation @@ -104,6 +104,8 @@ APP_VERSION, CACHE_VERSION)).encode('utf8')) for p in paths: cache_key_hash.update(("&path=%s" % p).encode('utf8')) + if self._cache_hash_mod: + cache_key_hash.update(self._cache_hash_mod.encode('utf8')) cache_key = cache_key_hash.hexdigest() # Check the cache for a valid version. @@ -177,7 +179,6 @@ # [custom theme] + [default theme] + [default] if theme_values is not None: self._processThemeLayer(theme_values, values) - merge_dicts(values, theme_values) # Make all sources belong to the "theme" realm at this point. srcc = values['site'].get('sources') @@ -190,7 +191,6 @@ # [default] if site_values is not None: self._processSiteLayer(site_values, values) - merge_dicts(values, site_values) # Set the theme site flag. if self.theme_config: @@ -209,10 +209,14 @@ # Generate the default theme model. gen_default_theme_model = bool(try_get_dict_values( (theme_values, 'site/use_default_theme_content'), - (values, 'site/use_default_theme_content'), default=True)) if gen_default_theme_model: - self._generateDefaultThemeModel(theme_values, values) + logger.debug("Generating default theme content model...") + cc = copy.deepcopy(default_theme_content_model_base) + merge_dicts(values, cc) + + # Merge the theme config into the result config. + merge_dicts(values, theme_values) def _processSiteLayer(self, site_values, values): # Default site content. 
@@ -221,34 +225,29 @@ (values, 'site/use_default_content'), default=True)) if gen_default_site_model: - self._generateDefaultSiteModel(site_values, values) + logger.debug("Generating default content model...") + cc = copy.deepcopy(default_content_model_base) + merge_dicts(values, cc) - def _generateDefaultThemeModel(self, theme_values, values): - logger.debug("Generating default theme content model...") - cc = copy.deepcopy(default_theme_content_model_base) - merge_dicts(values, cc) - - def _generateDefaultSiteModel(self, site_values, values): - logger.debug("Generating default content model...") - cc = copy.deepcopy(default_content_model_base) - merge_dicts(values, cc) + dcm = get_default_content_model(site_values, values) + merge_dicts(values, dcm) - dcm = get_default_content_model(site_values, values) - merge_dicts(values, dcm) + blogsc = try_get_dict_values( + (site_values, 'site/blogs'), + (values, 'site/blogs')) + if blogsc is None: + blogsc = ['posts'] + set_dict_value(site_values, 'site/blogs', blogsc) - blogsc = try_get_dict_values( - (site_values, 'site/blogs'), - (values, 'site/blogs')) - if blogsc is None: - blogsc = ['posts'] - set_dict_value(site_values, 'site/blogs', blogsc) + is_only_blog = (len(blogsc) == 1) + for blog_name in reversed(blogsc): + blog_cfg = get_default_content_model_for_blog( + blog_name, is_only_blog, site_values, values, + theme_site=self.theme_config) + merge_dicts(values, blog_cfg) - is_only_blog = (len(blogsc) == 1) - for blog_name in reversed(blogsc): - blog_cfg = get_default_content_model_for_blog( - blog_name, is_only_blog, site_values, values, - theme_site=self.theme_config) - merge_dicts(values, blog_cfg) + # Merge the site config into the result config. 
+ merge_dicts(values, site_values) def _validateAll(self, values): if values is None: @@ -304,9 +303,6 @@ taxonomies = v.get('taxonomies') if taxonomies is None: v['taxonomies'] = {} - generators = v.get('generators') - if generators is None: - v['generators'] = {} return v @@ -333,8 +329,8 @@ v.setdefault('html', values['site']['default_format']) auto_formats_re = r"\.(%s)$" % ( - '|'.join( - [re.escape(i) for i in list(v.keys())])) + '|'.join( + [re.escape(i) for i in list(v.keys())])) cache.write('auto_formats_re', auto_formats_re) return v @@ -343,7 +339,7 @@ def _validate_site_default_auto_format(v, values, cache): if v not in values['site']['auto_formats']: raise ConfigurationError( - "Default auto-format '%s' is not declared." % v) + "Default auto-format '%s' is not declared." % v) return v @@ -393,27 +389,21 @@ sc.setdefault('type', 'default') sc.setdefault('fs_endpoint', sn) sc.setdefault('ignore_missing_dir', False) - sc.setdefault('data_endpoint', sn) - sc.setdefault('data_type', 'iterator') + sc.setdefault('data_endpoint', None) + sc.setdefault('data_type', None) + sc.setdefault('default_layout', 'default') sc.setdefault('item_name', sn) sc.setdefault('items_per_page', 5) sc.setdefault('date_format', DEFAULT_DATE_FORMAT) sc.setdefault('realm', REALM_USER) + sc.setdefault('pipeline', None) # Validate endpoints. endpoint = sc['data_endpoint'] if endpoint in reserved_endpoints: raise ConfigurationError( - "Source '%s' is using a reserved endpoint name: %s" % - (sn, endpoint)) - - # Validate generators. - for gn, gc in sc.get('generators', {}).items(): - if not isinstance(gc, dict): - raise ConfigurationError( - "Generators for source '%s' should be defined in a " - "dictionary." % sn) - gc['source'] = sn + "Source '%s' is using a reserved endpoint name: %s" % + (sn, endpoint)) return v @@ -427,6 +417,8 @@ # Check routes are referencing correct sources, have default # values, etc. 
+ used_sources = set() + existing_sources = set(values['site']['sources'].keys()) for rc in v: if not isinstance(rc, dict): raise ConfigurationError("All routes in 'site/routes' must be " @@ -439,20 +431,17 @@ raise ConfigurationError("Route URLs must start with '/'.") r_source = rc.get('source') - r_generator = rc.get('generator') - if r_source is None and r_generator is None: - raise ConfigurationError("Routes must specify a source or " - "generator.") - if (r_source and - r_source not in list(values['site']['sources'].keys())): + if r_source is None: + raise ConfigurationError("Routes must specify a source.") + if r_source not in existing_sources: raise ConfigurationError("Route is referencing unknown " "source: %s" % r_source) - if (r_generator and - r_generator not in list(values['site']['generators'].keys())): - raise ConfigurationError("Route is referencing unknown " - "generator: %s" % r_generator) + if r_source in used_sources: + raise ConfigurationError("Source '%s' already has a route." % + r_source) + used_sources.add(r_source) - rc.setdefault('generator', None) + rc.setdefault('pass', 1) rc.setdefault('page_suffix', '/%num%') return v @@ -461,7 +450,7 @@ def _validate_site_taxonomies(v, values, cache): if not isinstance(v, dict): raise ConfigurationError( - "The 'site/taxonomies' setting must be a mapping.") + "The 'site/taxonomies' setting must be a mapping.") for tn, tc in v.items(): tc.setdefault('multiple', False) tc.setdefault('term', tn) @@ -469,23 +458,12 @@ return v -def _validate_site_generators(v, values, cache): - if not isinstance(v, dict): - raise ConfigurationError( - "The 'site/generators' setting must be a mapping.") - for gn, gc in v.items(): - if 'type' not in gc: - raise ConfigurationError( - "Generator '%s' doesn't specify a type." 
% gn) - return v - - def _validate_site_plugins(v, values, cache): if isinstance(v, str): v = v.split(',') elif not isinstance(v, list): raise ConfigurationError( - "The 'site/plugins' setting must be an array, or a " - "comma-separated list.") + "The 'site/plugins' setting must be an array, or a " + "comma-separated list.") return v diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/appconfigdefaults.py --- a/piecrust/appconfigdefaults.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/appconfigdefaults.py Tue Nov 21 22:07:12 2017 -0800 @@ -4,16 +4,16 @@ DEFAULT_DATE_FORMAT, DEFAULT_THEME_SOURCE) from piecrust.configuration import ( get_dict_values, try_get_dict_values) -from piecrust.sources.base import REALM_THEME +# Default configuration for all websites. +# default_configuration = collections.OrderedDict({ 'site': collections.OrderedDict({ 'title': "Untitled PieCrust website", 'root': '/', 'default_format': DEFAULT_FORMAT, 'default_template_engine': DEFAULT_TEMPLATE_ENGINE, - 'enable_gzip': True, 'pretty_urls': False, 'trailing_slash': False, 'date_format': DEFAULT_DATE_FORMAT, @@ -26,9 +26,6 @@ 'pagination_suffix': '/%num%', 'slugify_mode': 'encode', 'themes_sources': [DEFAULT_THEME_SOURCE], - 'cache_time': 28800, - 'enable_debug_info': True, - 'show_debug_info': False, 'use_default_content': True, 'use_default_theme_content': True, 'theme_site': False @@ -37,21 +34,32 @@ 'no_bake_setting': 'draft', 'workers': None, 'batch_size': None + }), + 'server': collections.OrderedDict({ + 'enable_gzip': True, + 'cache_time': 28800, + 'enable_debug_info': True, + 'show_debug_info': False }) }) +# Default content model for themes. 
+# default_theme_content_model_base = collections.OrderedDict({ 'site': collections.OrderedDict({ 'sources': collections.OrderedDict({ 'theme_pages': { - 'type': 'default', + 'fs_endpoint': 'pages', 'ignore_missing_dir': True, - 'fs_endpoint': 'pages', 'data_endpoint': 'site.pages', - 'default_layout': 'default', 'item_name': 'page', - 'realm': REALM_THEME + }, + 'theme_assets': { + 'fs_endpoint': 'assets', + 'ignore_missing_dir': True, + 'type': 'fs', + 'pipeline': 'asset' } }), 'routes': [ @@ -60,15 +68,47 @@ 'source': 'theme_pages', 'func': 'pcurl' } - ], - 'theme_tag_page': 'theme_pages:_tag.%ext%', - 'theme_category_page': 'theme_pages:_category.%ext%', - 'theme_month_page': 'theme_pages:_month.%ext%', - 'theme_year_page': 'theme_pages:_year.%ext%' + ] }) }) +# Additional theme configuration when previewing a theme by itself, +# so it can show some "sample/preview" content. +# +default_theme_preview_content_model = collections.OrderedDict({ + 'site': collections.OrderedDict({ + 'sources': collections.OrderedDict({ + 'theme_preview_pages': { + 'fs_endpoint': 'preview/pages', + 'ignore_missing_dir': True, + 'data_endpoint': 'site.pages', + 'item_name': 'page', + }, + 'theme_preview_posts': { + 'fs_endpoint': 'preview/posts', + 'ignore_missing_dir': True, + 'data_endpoint': 'blog.posts', + 'item_name': 'post' + } + }), + 'routes': [ + { + 'url': '/posts/%year%/%month%/%slug%', + 'source': 'theme_preview_posts' + }, + { + 'url': '/%slug%', + 'source': 'theme_preview_pages', + 'func': 'pcurl' + } + ] + }) +}) + + +# Default content model for websites. 
+# default_content_model_base = collections.OrderedDict({ 'site': collections.OrderedDict({ 'posts_fs': DEFAULT_POSTS_FS, @@ -77,8 +117,18 @@ 'post_url': '/%year%/%month%/%day%/%slug%', 'year_url': '/archives/%year%', 'tag_url': '/tag/%tag%', + 'tag_feed_url': '/tag/%tag%.xml', 'category_url': '/%category%', - 'posts_per_page': 5 + 'category_feed_url': '/%category%.xml', + 'posts_per_page': 5, + 'sources': { + 'assets': { + 'fs_endpoint': 'assets', + 'ignore_missing_dir': True, + 'type': 'fs', + 'pipeline': 'asset' + } + } }) }) @@ -112,7 +162,7 @@ }), ('categories', { 'term': 'category', - 'func_name': 'pccaturl' + 'func_term_name': 'cat' }) ]) }) @@ -139,6 +189,7 @@ data_endpoint = 'blog' item_name = 'post' tpl_func_prefix = 'pc' + year_archive_tpl = '_year.html' if theme_site: # If this is a theme site, show posts from a `sample` directory @@ -158,6 +209,7 @@ (site_values, '%s/func_prefix' % blog_name), (values, '%s/func_prefix' % blog_name), default=('pc%s' % blog_name)) + year_archive_tpl = '%s_year.html,_year.html' % page_prefix # Figure out the settings values for this blog, specifically. 
# The value could be set on the blog config itself, globally, or left at @@ -175,14 +227,6 @@ post_url = '/' + url_prefix + blog_values['post_url'].lstrip('/') year_url = '/' + url_prefix + blog_values['year_url'].lstrip('/') - year_archive = 'pages:%s_year.%%ext%%' % page_prefix - if not theme_site: - theme_year_page = try_get_dict_values( - (site_values, 'site/theme_year_page'), - (values, 'site/theme_year_page')) - if theme_year_page: - year_archive += ';' + theme_year_page - cfg = collections.OrderedDict({ 'site': collections.OrderedDict({ 'sources': collections.OrderedDict({ @@ -196,13 +240,11 @@ 'items_per_page': posts_per_page, 'date_format': date_format, 'default_layout': default_layout - }) - }), - 'generators': collections.OrderedDict({ - ('%s_archives' % blog_name): collections.OrderedDict({ + }), + '%s_archives' % blog_name: collections.OrderedDict({ 'type': 'blog_archives', 'source': blog_name, - 'page': year_archive + 'template': year_archive_tpl }) }), 'routes': [ @@ -213,14 +255,14 @@ }, { 'url': year_url, - 'generator': ('%s_archives' % blog_name), + 'source': ('%s_archives' % blog_name), 'func': ('%syearurl' % tpl_func_prefix) } ] }) }) - # Add a generator and a route for each taxonomy. + # Add a source and a route for each taxonomy. taxonomies_cfg = try_get_dict_values( (site_values, 'site/taxonomies'), (values, 'site/taxonomies'), @@ -228,22 +270,16 @@ for tax_name, tax_cfg in taxonomies_cfg.items(): term = tax_cfg.get('term', tax_name) - # Generator. - page_ref = 'pages:%s_%s.%%ext%%' % (page_prefix, term) - if not theme_site: - theme_page_ref = try_get_dict_values( - (site_values, 'site/theme_%s_page' % term), - (values, 'site/theme_%s_page' % term)) - if theme_page_ref: - page_ref += ';' + theme_page_ref - tax_gen_name = '%s_%s' % (blog_name, tax_name) - tax_gen = collections.OrderedDict({ + # Source. 
+ term_tpl = '%s_%s.html' % (page_prefix, term) + tax_src_name = '%s_%s' % (blog_name, tax_name) + tax_src = collections.OrderedDict({ 'type': 'taxonomy', 'source': blog_name, 'taxonomy': tax_name, - 'page': page_ref + 'template': term_tpl }) - cfg['site']['generators'][tax_gen_name] = tax_gen + cfg['site']['sources'][tax_src_name] = tax_src # Route. tax_url_cfg_name = '%s_url' % term @@ -253,14 +289,15 @@ (values, 'site/%s' % tax_url_cfg_name), default=('%s/%%%s%%' % (term, term))) tax_url = '/' + url_prefix + tax_url.lstrip('/') - tax_func_name = try_get_dict_values( - (site_values, 'site/taxonomies/%s/func_name' % tax_name), - (values, 'site/taxonomies/%s/func_name' % tax_name), - default=('%s%surl' % (tpl_func_prefix, term))) + tax_func_term_name = try_get_dict_values( + (site_values, 'site/taxonomies/%s/func_term_name' % tax_name), + (values, 'site/taxonomies/%s/func_term_name' % tax_name), + default=term) + tax_func_name = '%s%surl' % (tpl_func_prefix, tax_func_term_name) tax_route = collections.OrderedDict({ 'url': tax_url, - 'generator': tax_gen_name, - 'taxonomy': tax_name, + 'pass': 2, + 'source': tax_src_name, 'func': tax_func_name }) cfg['site']['routes'].append(tax_route) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/baking/baker.py --- a/piecrust/baking/baker.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/baking/baker.py Tue Nov 21 22:07:12 2017 -0800 @@ -2,146 +2,133 @@ import os.path import hashlib import logging -from piecrust.baking.records import ( - BakeRecordEntry, TransitionalBakeRecord) -from piecrust.baking.worker import ( - save_factory, - JOB_LOAD, JOB_RENDER_FIRST, JOB_BAKE) from piecrust.chefutil import ( - format_timed_scope, format_timed) + format_timed_scope, format_timed) from piecrust.environment import ExecutionStats -from piecrust.generation.base import PageGeneratorBakeContext -from piecrust.routing import create_route_metadata -from piecrust.sources.base import ( - REALM_NAMES, REALM_USER, REALM_THEME) +from 
piecrust.pipelines.base import ( + PipelineJobCreateContext, PipelineJobResultHandleContext, + PipelineJobValidateContext, PipelineManager, + get_pipeline_name_for_source) +from piecrust.pipelines.records import ( + MultiRecordHistory, MultiRecord, RecordEntry, + load_records) +from piecrust.sources.base import REALM_USER, REALM_THEME, REALM_NAMES logger = logging.getLogger(__name__) +def get_bake_records_path(app, out_dir, *, suffix=''): + records_cache = app.cache.getCache('baker') + records_id = hashlib.md5(out_dir.encode('utf8')).hexdigest() + records_name = '%s%s.records' % (records_id, suffix) + return records_cache.getCachePath(records_name) + + class Baker(object): - def __init__(self, app, out_dir, force=False, - applied_config_variant=None, - applied_config_values=None): - assert app and out_dir + def __init__(self, appfactory, app, out_dir, *, + force=False, + allowed_pipelines=None, + forbidden_pipelines=None, + allowed_sources=None, + rotate_bake_records=True): + self.appfactory = appfactory self.app = app self.out_dir = out_dir self.force = force - self.applied_config_variant = applied_config_variant - self.applied_config_values = applied_config_values - - # Remember what generator pages we should skip. - self.generator_pages = [] - logger.debug("Gathering generator page paths:") - for gen in self.app.generators: - for path in gen.page_ref.possible_paths: - self.generator_pages.append(path) - logger.debug(" - %s" % path) - - # Register some timers. - self.app.env.registerTimer('LoadJob', raise_if_registered=False) - self.app.env.registerTimer('RenderFirstSubJob', - raise_if_registered=False) - self.app.env.registerTimer('BakeJob', raise_if_registered=False) + self.allowed_pipelines = allowed_pipelines + self.forbidden_pipelines = forbidden_pipelines + self.allowed_sources = allowed_sources + self.rotate_bake_records = rotate_bake_records def bake(self): + start_time = time.perf_counter() + + # Setup baker. 
logger.debug(" Bake Output: %s" % self.out_dir) logger.debug(" Root URL: %s" % self.app.config.get('site/root')) # Get into bake mode. - start_time = time.perf_counter() self.app.config.set('baker/is_baking', True) - self.app.env.base_asset_url_format = '%uri%' + self.app.config.set('site/asset_url_format', '%page_uri%/%filename%') + + stats = self.app.env.stats + stats.registerTimer('LoadSourceContents', raise_if_registered=False) + stats.registerTimer('MasterTaskPut_1', raise_if_registered=False) + stats.registerTimer('MasterTaskPut_2+', raise_if_registered=False) # Make sure the output directory exists. if not os.path.isdir(self.out_dir): os.makedirs(self.out_dir, 0o755) - # Load/create the bake record. - record = TransitionalBakeRecord() - record_cache = self.app.cache.getCache('baker') - record_id = hashlib.md5(self.out_dir.encode('utf8')).hexdigest() - record_name = record_id + '.record' - previous_record_path = None - if not self.force and record_cache.has(record_name): - with format_timed_scope(logger, "loaded previous bake record", + # Load/create the bake records. + records_path = get_bake_records_path( + self.app, self.out_dir) + if not self.force and os.path.isfile(records_path): + with format_timed_scope(logger, "loaded previous bake records", level=logging.DEBUG, colored=False): - previous_record_path = record_cache.getCachePath(record_name) - record.loadPrevious(previous_record_path) - record.current.success = True + previous_records = load_records(records_path) + else: + previous_records = MultiRecord() + current_records = MultiRecord() # Figure out if we need to clean the cache because important things # have changed. 
- is_cache_valid = self._handleCacheValidity(record) + is_cache_valid = self._handleCacheValidity(previous_records, + current_records) if not is_cache_valid: - previous_record_path = None + previous_records = MultiRecord() + + # Create the bake records history which tracks what's up-to-date + # or not since last time we baked to the given output folder. + record_histories = MultiRecordHistory( + previous_records, current_records) # Pre-create all caches. for cache_name in ['app', 'baker', 'pages', 'renders']: self.app.cache.getCache(cache_name) - # Gather all sources by realm -- we're going to bake each realm - # separately so we can handle "overriding" (i.e. one realm overrides - # another realm's pages, like the user realm overriding the theme - # realm). - sources_by_realm = {} - for source in self.app.sources: - srclist = sources_by_realm.setdefault(source.realm, []) - srclist.append(source) + # Create the pipelines. + ppmngr = self._createPipelineManager(record_histories) # Create the worker processes. - pool = self._createWorkerPool(previous_record_path) + pool_userdata = _PoolUserData(self, ppmngr) + pool = self._createWorkerPool(records_path, pool_userdata) + + # Done with all the setup, let's start the actual work. + logger.info(format_timed(start_time, "setup baker")) + + # Load all sources. + self._loadSources(ppmngr) # Bake the realms. - realm_list = [REALM_USER, REALM_THEME] - for realm in realm_list: - srclist = sources_by_realm.get(realm) - if srclist is not None: - self._bakeRealm(record, pool, realm, srclist) + self._bakeRealms(pool, ppmngr, record_histories) - # Call all the page generators. - self._bakePageGenerators(record, pool) + # Handle deletions, collapse records, etc. + ppmngr.postJobRun() + ppmngr.deleteStaleOutputs() + ppmngr.collapseRecords() # All done with the workers. Close the pool and get reports. 
- reports = pool.close() - total_stats = ExecutionStats() - record.current.stats['_Total'] = total_stats - for i in range(len(reports)): - worker_stats = reports[i]['data'] - if worker_stats is not None: - worker_name = 'BakeWorker_%d' % i - record.current.stats[worker_name] = worker_stats - total_stats.mergeStats(worker_stats) + pool_stats = pool.close() + current_records.stats = _merge_execution_stats(stats, *pool_stats) - # Delete files from the output. - self._handleDeletetions(record) + # Shutdown the pipelines. + ppmngr.shutdownPipelines() - # Backup previous records. - for i in range(8, -1, -1): - suffix = '' if i == 0 else '.%d' % i - record_path = record_cache.getCachePath( - '%s%s.record' % (record_id, suffix)) - if os.path.exists(record_path): - record_path_next = record_cache.getCachePath( - '%s.%s.record' % (record_id, i + 1)) - if os.path.exists(record_path_next): - os.remove(record_path_next) - os.rename(record_path, record_path_next) - - # Save the bake record. - with format_timed_scope(logger, "saved bake record.", - level=logging.DEBUG, colored=False): - record.current.bake_time = time.time() - record.current.out_dir = self.out_dir - record.saveCurrent(record_cache.getCachePath(record_name)) + # Backup previous records, save the current ones. + current_records.bake_time = time.time() + current_records.out_dir = self.out_dir + _save_bake_records(current_records, records_path, + rotate_previous=self.rotate_bake_records) # All done. self.app.config.set('baker/is_baking', False) logger.debug(format_timed(start_time, 'done baking')) - return record.detach() + return current_records - def _handleCacheValidity(self, record): + def _handleCacheValidity(self, previous_records, current_records): start_time = time.perf_counter() reason = None @@ -151,10 +138,9 @@ # The configuration file was changed, or we're running a new # version of the app. 
reason = "not valid anymore" - elif (not record.previous.bake_time or - not record.previous.hasLatestVersion()): - # We have no valid previous bake record. - reason = "need bake record regeneration" + elif previous_records.invalidated: + # We have no valid previous bake records. + reason = "need bake records regeneration" else: # Check if any template has changed since the last bake. Since # there could be some advanced conditional logic going on, we'd @@ -165,261 +151,317 @@ for fn in filenames: full_fn = os.path.join(dpath, fn) max_time = max(max_time, os.path.getmtime(full_fn)) - if max_time >= record.previous.bake_time: + if max_time >= previous_records.bake_time: reason = "templates modified" if reason is not None: # We have to bake everything from scratch. self.app.cache.clearCaches(except_names=['app', 'baker']) self.force = True - record.incremental_count = 0 - record.clearPrevious() - logger.info(format_timed( - start_time, - "cleaned cache (reason: %s)" % reason)) + current_records.incremental_count = 0 + previous_records = MultiRecord() + logger.debug(format_timed( + start_time, "cleaned cache (reason: %s)" % reason, + colored=False)) return False else: - record.incremental_count += 1 + current_records.incremental_count += 1 logger.debug(format_timed( - start_time, "cache is assumed valid", - colored=False)) + start_time, "cache is assumed valid", colored=False)) return True - def _bakeRealm(self, record, pool, realm, srclist): - start_time = time.perf_counter() - try: - record.current.baked_count[realm] = 0 - record.current.total_baked_count[realm] = 0 - - all_factories = [] - for source in srclist: - factories = source.getPageFactories() - all_factories += [f for f in factories - if f.path not in self.generator_pages] + def _createPipelineManager(self, record_histories): + # Gather all sources by realm -- we're going to bake each realm + # separately so we can handle "overriding" (i.e. 
one realm overrides + # another realm's pages, like the user realm overriding the theme + # realm). + # + # Also, create and initialize each pipeline for each source. + has_any_pp = False + ppmngr = PipelineManager( + self.app, self.out_dir, + record_histories=record_histories) + ok_pp = self.allowed_pipelines + nok_pp = self.forbidden_pipelines + ok_src = self.allowed_sources + for source in self.app.sources: + if ok_src is not None and source.name not in ok_src: + continue - self._loadRealmPages(record, pool, all_factories) - self._renderRealmPages(record, pool, all_factories) - self._bakeRealmPages(record, pool, realm, all_factories) - finally: - page_count = record.current.baked_count[realm] - total_page_count = record.current.total_baked_count[realm] - logger.info(format_timed( - start_time, - "baked %d %s pages (%d total)." % - (page_count, REALM_NAMES[realm].lower(), - total_page_count))) + pname = get_pipeline_name_for_source(source) + if ok_pp is not None and pname not in ok_pp: + continue + if nok_pp is not None and pname in nok_pp: + continue - def _loadRealmPages(self, record, pool, factories): - def _handler(res): - # Create the record entry for this page. - # This will also update the `dirty_source_names` for the record - # as we add page files whose last modification times are later - # than the last bake. - record_entry = BakeRecordEntry(res['source_name'], res['path']) - record_entry.config = res['config'] - record_entry.timestamp = res['timestamp'] - if res['errors']: - record_entry.errors += res['errors'] - record.current.success = False - self._logErrors(res['path'], res['errors']) - record.addEntry(record_entry) + ppinfo = ppmngr.createPipeline(source) + logger.debug( + "Created pipeline '%s' for source: %s" % + (ppinfo.pipeline.PIPELINE_NAME, source.name)) + has_any_pp = True + if not has_any_pp: + raise Exception("The website has no content sources, or the bake " + "command was invoked with all pipelines filtered " + "out. 
There's nothing to do.") + return ppmngr - logger.debug("Loading %d realm pages..." % len(factories)) - with format_timed_scope(logger, - "loaded %d pages" % len(factories), - level=logging.DEBUG, colored=False, - timer_env=self.app.env, - timer_category='LoadJob'): - jobs = [] - for fac in factories: - job = { - 'type': JOB_LOAD, - 'job': save_factory(fac)} - jobs.append(job) - ar = pool.queueJobs(jobs, handler=_handler) - ar.wait() + def _loadSources(self, ppmngr): + start_time = time.perf_counter() - def _renderRealmPages(self, record, pool, factories): - def _handler(res): - entry = record.getCurrentEntry(res['path']) - if res['errors']: - entry.errors += res['errors'] - record.current.success = False - self._logErrors(res['path'], res['errors']) + for ppinfo in ppmngr.getPipelineInfos(): + rec = ppinfo.record_history.current + rec_entries = ppinfo.pipeline.loadAllContents() + if rec_entries is not None: + for e in rec_entries: + rec.addEntry(e) + + stats = self.app.env.stats + stats.stepTimer('LoadSourceContents', + time.perf_counter() - start_time) + logger.info(format_timed(start_time, "loaded site content")) - logger.debug("Rendering %d realm pages..." % len(factories)) - with format_timed_scope(logger, - "prepared %d pages" % len(factories), - level=logging.DEBUG, colored=False, - timer_env=self.app.env, - timer_category='RenderFirstSubJob'): - jobs = [] - for fac in factories: - record_entry = record.getCurrentEntry(fac.path) - if record_entry.errors: - logger.debug("Ignoring %s because it had previous " - "errors." % fac.ref_spec) - continue + def _bakeRealms(self, pool, ppmngr, record_histories): + # Bake the realms -- user first, theme second, so that a user item + # can override a theme item. + # Do this for as many times as we have pipeline passes left to do. 
+ realm_list = [REALM_USER, REALM_THEME] + pp_by_pass_and_realm = _get_pipeline_infos_by_pass_and_realm( + ppmngr.getPipelineInfos()) - # Make sure the source and the route exist for this page, - # otherwise we add errors to the record entry and we'll skip - # this page for the rest of the bake. - source = self.app.getSource(fac.source.name) - if source is None: - record_entry.errors.append( - "Can't get source for page: %s" % fac.ref_spec) - logger.error(record_entry.errors[-1]) - continue + for pp_pass_num in sorted(pp_by_pass_and_realm.keys()): + logger.debug("Pipelines pass %d" % pp_pass_num) + pp_by_realm = pp_by_pass_and_realm[pp_pass_num] + for realm in realm_list: + pplist = pp_by_realm.get(realm) + if pplist is not None: + self._bakeRealm(pool, ppmngr, record_histories, + pp_pass_num, realm, pplist) - route = self.app.getSourceRoute(fac.source.name, fac.metadata) - if route is None: - record_entry.errors.append( - "Can't get route for page: %s" % fac.ref_spec) - logger.error(record_entry.errors[-1]) - continue + def _bakeRealm(self, pool, ppmngr, record_histories, + pp_pass_num, realm, pplist): + # Start with the first step, where we iterate on the content sources' + # items and run jobs on those. + pool.userdata.cur_step = 0 + next_step_jobs = {} + pool.userdata.next_step_jobs = next_step_jobs - # All good, queue the job. 
- route_index = self.app.routes.index(route) - job = { - 'type': JOB_RENDER_FIRST, - 'job': { - 'factory_info': save_factory(fac), - 'route_index': route_index - } - } - jobs.append(job) + start_time = time.perf_counter() + job_count = 0 + stats = self.app.env.stats + realm_name = REALM_NAMES[realm].lower() - ar = pool.queueJobs(jobs, handler=_handler) - ar.wait() - - def _bakeRealmPages(self, record, pool, realm, factories): - def _handler(res): - entry = record.getCurrentEntry(res['path']) - entry.subs = res['sub_entries'] - if res['errors']: - entry.errors += res['errors'] - self._logErrors(res['path'], res['errors']) - if entry.has_any_error: - record.current.success = False - if entry.subs and entry.was_any_sub_baked: - record.current.baked_count[realm] += 1 - record.current.total_baked_count[realm] += len(entry.subs) + for ppinfo in pplist: + src = ppinfo.source + pp = ppinfo.pipeline + jcctx = PipelineJobCreateContext(pp_pass_num, pp.record_name, + record_histories) - logger.debug("Baking %d realm pages..." % len(factories)) - with format_timed_scope(logger, - "baked %d pages" % len(factories), - level=logging.DEBUG, colored=False, - timer_env=self.app.env, - timer_category='BakeJob'): - jobs = [] - for fac in factories: - job = self._makeBakeJob(record, fac) - if job is not None: - jobs.append(job) + next_step_jobs[src.name] = [] + jobs = pp.createJobs(jcctx) + if jobs is not None: + new_job_count = len(jobs) + job_count += new_job_count + pool.queueJobs(jobs) + else: + new_job_count = 0 - ar = pool.queueJobs(jobs, handler=_handler) - ar.wait() + logger.debug( + "Queued %d jobs for source '%s' using pipeline '%s' " + "(%s, step 0)." 
% + (new_job_count, src.name, pp.PIPELINE_NAME, realm_name)) + + stats.stepTimer('MasterTaskPut_1', time.perf_counter() - start_time) - def _bakePageGenerators(self, record, pool): - for gen in self.app.generators: - ctx = PageGeneratorBakeContext(self.app, record, pool, gen) - gen.bake(ctx) + if job_count == 0: + logger.debug("No jobs queued! Bailing out of this bake pass.") + return - def _makeBakeJob(self, record, fac): - # Get the previous (if any) and current entry for this page. - pair = record.getPreviousAndCurrentEntries(fac.path) - assert pair is not None - prev_entry, cur_entry = pair - assert cur_entry is not None + pool.wait() + + logger.info(format_timed( + start_time, "%d pipeline jobs completed (%s, step 0)." % + (job_count, realm_name))) - # Ignore if there were errors in the previous passes. - if cur_entry.errors: - logger.debug("Ignoring %s because it had previous " - "errors." % fac.ref_spec) - return None - - # Build the route metadata and find the appropriate route. - page = fac.buildPage() - route_metadata = create_route_metadata(page) - route = self.app.getSourceRoute(fac.source.name, route_metadata) - assert route is not None + # Now let's see if any job created a follow-up job. Let's keep + # processing those jobs as long as they create new ones. + pool.userdata.cur_step = 1 + while True: + # Make a copy of out next step jobs and reset the list, so + # the first jobs to be processed don't mess it up as we're + # still iterating on it. + next_step_jobs = pool.userdata.next_step_jobs + pool.userdata.next_step_jobs = {} - # Figure out if this page is overriden by another previously - # baked page. This happens for example when the user has - # made a page that has the same page/URL as a theme page. 
- uri = route.getUri(route_metadata) - override_entry = record.getOverrideEntry(page.path, uri) - if override_entry is not None: - override_source = self.app.getSource( - override_entry.source_name) - if override_source.realm == fac.source.realm: - cur_entry.errors.append( - "Page '%s' maps to URL '%s' but is overriden " - "by page '%s'." % - (fac.ref_spec, uri, override_entry.path)) - logger.error(cur_entry.errors[-1]) - cur_entry.flags |= BakeRecordEntry.FLAG_OVERRIDEN - return None + start_time = time.perf_counter() + job_count = 0 + + for sn, jobs in next_step_jobs.items(): + if jobs: + logger.debug( + "Queuing jobs for source '%s' (%s, step %d)." % + (sn, realm_name, pool.userdata.cur_step)) - route_index = self.app.routes.index(route) - job = { - 'type': JOB_BAKE, - 'job': { - 'factory_info': save_factory(fac), - 'generator_name': None, - 'generator_record_key': None, - 'route_index': route_index, - 'route_metadata': route_metadata, - 'dirty_source_names': record.dirty_source_names - } - } - return job + pp = ppmngr.getPipeline(sn) + valctx = PipelineJobValidateContext( + pp_pass_num, pool.userdata.cur_step, + pp.record_name, record_histories) + pp.validateNextStepJobs(jobs, valctx) + + job_count += len(jobs) + pool.userdata.next_step_jobs[sn] = [] + pool.queueJobs(jobs) + + stats.stepTimer('MasterTaskPut_2+', time.perf_counter() - start_time) - def _handleDeletetions(self, record): - logger.debug("Handling deletions...") - for path, reason in record.getDeletions(): - logger.debug("Removing '%s': %s" % (path, reason)) - record.current.deleted.append(path) - try: - os.remove(path) - logger.info('[delete] %s' % path) - except OSError: - # Not a big deal if that file had already been removed - # by the user. 
- pass + if job_count == 0: + break + + pool.wait() - def _logErrors(self, path, errors): - rel_path = os.path.relpath(path, self.app.root_dir) - logger.error("Errors found in %s:" % rel_path) + logger.info(format_timed( + start_time, + "%d pipeline jobs completed (%s, step %d)." % + (job_count, realm_name, pool.userdata.cur_step))) + + pool.userdata.cur_step += 1 + + def _logErrors(self, item_spec, errors): + logger.error("Errors found in %s:" % item_spec) for e in errors: logger.error(" " + e) - def _createWorkerPool(self, previous_record_path): - from piecrust.app import PieCrustFactory + def _logWorkerException(self, item_spec, exc_data): + logger.error("Errors found in %s:" % item_spec) + logger.error(exc_data['value']) + if self.app.debug: + logger.error(exc_data['traceback']) + + def _createWorkerPool(self, previous_records_path, pool_userdata): from piecrust.workerpool import WorkerPool from piecrust.baking.worker import BakeWorkerContext, BakeWorker - appfactory = PieCrustFactory( - self.app.root_dir, - cache=self.app.cache.enabled, - cache_key=self.app.cache_key, - config_variant=self.applied_config_variant, - config_values=self.applied_config_values, - debug=self.app.debug, - theme_site=self.app.theme_site) - worker_count = self.app.config.get('baker/workers') batch_size = self.app.config.get('baker/batch_size') ctx = BakeWorkerContext( - appfactory, - self.out_dir, - force=self.force, - previous_record_path=previous_record_path) + self.appfactory, + self.out_dir, + force=self.force, + previous_records_path=previous_records_path, + allowed_pipelines=self.allowed_pipelines, + forbidden_pipelines=self.forbidden_pipelines) pool = WorkerPool( - worker_count=worker_count, - batch_size=batch_size, - worker_class=BakeWorker, - initargs=(ctx,)) + worker_count=worker_count, + batch_size=batch_size, + worker_class=BakeWorker, + initargs=(ctx,), + callback=self._handleWorkerResult, + error_callback=self._handleWorkerError, + userdata=pool_userdata) return pool + 
def _handleWorkerResult(self, job, res, userdata): + cur_step = userdata.cur_step + source_name, item_spec = job['job_spec'] + + # See if there's a next step to take. + npj = res.get('next_step_job') + if npj is not None: + npj['step_num'] = cur_step + 1 + userdata.next_step_jobs[source_name].append(npj) + + # Make the pipeline do custom handling to update the record entry. + ppinfo = userdata.ppmngr.getPipelineInfo(source_name) + pipeline = ppinfo.pipeline + record = ppinfo.current_record + ppmrctx = PipelineJobResultHandleContext(record, job, cur_step) + pipeline.handleJobResult(res, ppmrctx) + + # Set the overall success flags if there was an error. + record_entry = ppmrctx.record_entry + if not record_entry.success: + record.success = False + userdata.records.success = False + self._logErrors(job['item_spec'], record_entry.errors) + + def _handleWorkerError(self, job, exc_data, userdata): + # Set the overall success flag. + source_name, item_spec = job['job_spec'] + ppinfo = userdata.ppmngr.getPipelineInfo(source_name) + pipeline = ppinfo.pipeline + record = ppinfo.current_record + record.success = False + userdata.records.success = False + + # Add those errors to the record, if possible. + record_entry_spec = job.get('record_entry_spec', item_spec) + e = record.getEntry(record_entry_spec) + if e: + e.errors.append(exc_data['value']) + self._logWorkerException(item_spec, exc_data) + + # Log debug stuff. 
+ if self.app.debug: + logger.error(exc_data.traceback) + + +class _PoolUserData: + def __init__(self, baker, ppmngr): + self.baker = baker + self.ppmngr = ppmngr + self.records = ppmngr.record_histories.current + self.cur_step = 0 + self.next_step_jobs = {} + + +def _get_pipeline_infos_by_pass_and_realm(pp_infos): + pp_by_pass_and_realm = {} + for pp_info in pp_infos: + pp_pass_num = pp_info.pipeline.PASS_NUM + if isinstance(pp_pass_num, list): + for ppn in pp_pass_num: + _add_pipeline_info_to_pass_and_realm_dict( + ppn, pp_info, pp_by_pass_and_realm) + else: + _add_pipeline_info_to_pass_and_realm_dict( + pp_pass_num, pp_info, pp_by_pass_and_realm) + return pp_by_pass_and_realm + + +def _add_pipeline_info_to_pass_and_realm_dict(pp_pass_num, pp_info, + pp_by_pass_and_realm): + pp_by_realm = pp_by_pass_and_realm.setdefault(pp_pass_num, {}) + pplist = pp_by_realm.setdefault( + pp_info.pipeline.source.config['realm'], []) + pplist.append(pp_info) + + +def _merge_execution_stats(base_stats, *other_stats): + total_stats = ExecutionStats() + total_stats.mergeStats(base_stats) + for ps in other_stats: + if ps is not None: + total_stats.mergeStats(ps) + return total_stats + + +def _save_bake_records(records, records_path, *, rotate_previous): + if rotate_previous: + records_dir, records_fn = os.path.split(records_path) + records_id, _ = os.path.splitext(records_fn) + for i in range(8, -1, -1): + suffix = '' if i == 0 else '.%d' % i + records_path_i = os.path.join( + records_dir, + '%s%s.records' % (records_id, suffix)) + if os.path.exists(records_path_i): + records_path_next = os.path.join( + records_dir, + '%s.%s.records' % (records_id, i + 1)) + if os.path.exists(records_path_next): + os.remove(records_path_next) + os.rename(records_path_i, records_path_next) + + with format_timed_scope(logger, "saved bake records.", + level=logging.DEBUG, colored=False): + records.save(records_path) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/baking/records.py --- 
a/piecrust/baking/records.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,204 +0,0 @@ -import copy -import os.path -import hashlib -import logging -from piecrust.records import Record, TransitionalRecord - - -logger = logging.getLogger(__name__) - - -def _get_transition_key(path, extra_key=None): - key = path - if extra_key: - key += '+%s' % extra_key - return hashlib.md5(key.encode('utf8')).hexdigest() - - -class BakeRecord(Record): - RECORD_VERSION = 20 - - def __init__(self): - super(BakeRecord, self).__init__() - self.out_dir = None - self.bake_time = None - self.baked_count = {} - self.total_baked_count = {} - self.deleted = [] - self.success = True - - -class SubPageBakeInfo(object): - FLAG_NONE = 0 - FLAG_BAKED = 2**0 - FLAG_FORCED_BY_SOURCE = 2**1 - FLAG_FORCED_BY_NO_PREVIOUS = 2**2 - FLAG_FORCED_BY_PREVIOUS_ERRORS = 2**3 - FLAG_FORMATTING_INVALIDATED = 2**4 - - def __init__(self, out_uri, out_path): - self.out_uri = out_uri - self.out_path = out_path - self.flags = self.FLAG_NONE - self.errors = [] - self.render_info = [None, None] # Same length as RENDER_PASSES - - @property - def was_clean(self): - return (self.flags & self.FLAG_BAKED) == 0 and len(self.errors) == 0 - - @property - def was_baked(self): - return (self.flags & self.FLAG_BAKED) != 0 - - @property - def was_baked_successfully(self): - return self.was_baked and len(self.errors) == 0 - - def anyPass(self, func): - for pinfo in self.render_info: - if pinfo and func(pinfo): - return True - return False - - def copyRenderInfo(self): - return copy.deepcopy(self.render_info) - - -class BakeRecordEntry(object): - """ An entry in the bake record. 
- """ - FLAG_NONE = 0 - FLAG_NEW = 2**0 - FLAG_SOURCE_MODIFIED = 2**1 - FLAG_OVERRIDEN = 2**2 - - def __init__(self, source_name, path, extra_key=None): - self.source_name = source_name - self.path = path - self.extra_key = extra_key - self.flags = self.FLAG_NONE - self.config = None - self.timestamp = None - self.errors = [] - self.subs = [] - - @property - def path_mtime(self): - return os.path.getmtime(self.path) - - @property - def was_overriden(self): - return (self.flags & self.FLAG_OVERRIDEN) != 0 - - @property - def num_subs(self): - return len(self.subs) - - @property - def was_any_sub_baked(self): - for o in self.subs: - if o.was_baked: - return True - return False - - @property - def all_assets(self): - for sub in self.subs: - yield from sub.assets - - @property - def all_out_paths(self): - for sub in self.subs: - yield sub.out_path - - @property - def has_any_error(self): - if len(self.errors) > 0: - return True - for o in self.subs: - if len(o.errors) > 0: - return True - return False - - def getSub(self, sub_index): - return self.subs[sub_index - 1] - - def getAllErrors(self): - yield from self.errors - for o in self.subs: - yield from o.errors - - def getAllUsedSourceNames(self): - res = set() - for o in self.subs: - for pinfo in o.render_info: - if pinfo: - res |= pinfo.used_source_names - return res - - -class TransitionalBakeRecord(TransitionalRecord): - def __init__(self, previous_path=None): - super(TransitionalBakeRecord, self).__init__(BakeRecord, - previous_path) - self.dirty_source_names = set() - - def addEntry(self, entry): - if (self.previous.bake_time and - entry.path_mtime >= self.previous.bake_time): - entry.flags |= BakeRecordEntry.FLAG_SOURCE_MODIFIED - self.dirty_source_names.add(entry.source_name) - super(TransitionalBakeRecord, self).addEntry(entry) - - def getTransitionKey(self, entry): - return _get_transition_key(entry.path, entry.extra_key) - - def getPreviousAndCurrentEntries(self, path, extra_key=None): - key = 
_get_transition_key(path, extra_key) - pair = self.transitions.get(key) - return pair - - def getOverrideEntry(self, path, uri): - for pair in self.transitions.values(): - cur = pair[1] - if cur and cur.path != path: - for o in cur.subs: - if o.out_uri == uri: - return cur - return None - - def getPreviousEntry(self, path, extra_key=None): - pair = self.getPreviousAndCurrentEntries(path, extra_key) - if pair is not None: - return pair[0] - return None - - def getCurrentEntry(self, path, extra_key=None): - pair = self.getPreviousAndCurrentEntries(path, extra_key) - if pair is not None: - return pair[1] - return None - - def collapseEntry(self, prev_entry): - cur_entry = copy.deepcopy(prev_entry) - cur_entry.flags = BakeRecordEntry.FLAG_NONE - for o in cur_entry.subs: - o.flags = SubPageBakeInfo.FLAG_NONE - self.addEntry(cur_entry) - - def getDeletions(self): - for prev, cur in self.transitions.values(): - if prev and not cur: - for sub in prev.subs: - yield (sub.out_path, 'previous source file was removed') - elif prev and cur: - prev_out_paths = [o.out_path for o in prev.subs] - cur_out_paths = [o.out_path for o in cur.subs] - diff = set(prev_out_paths) - set(cur_out_paths) - for p in diff: - yield (p, 'source file changed outputs') - - def _onNewEntryAdded(self, entry): - entry.flags |= BakeRecordEntry.FLAG_NEW - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/baking/single.py --- a/piecrust/baking/single.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,275 +0,0 @@ -import os.path -import queue -import logging -import threading -import urllib.parse -from piecrust import ASSET_DIR_SUFFIX -from piecrust.baking.records import SubPageBakeInfo -from piecrust.rendering import ( - QualifiedPage, PageRenderingContext, render_page, - PASS_FORMATTING) -from piecrust.uriutil import split_uri - - -logger = logging.getLogger(__name__) - - -class BakingError(Exception): - pass - - -def _text_writer(q): - while True: - item = q.get() - if item 
is not None: - out_path, txt = item - out_dir = os.path.dirname(out_path) - _ensure_dir_exists(out_dir) - - with open(out_path, 'w', encoding='utf8') as fp: - fp.write(txt) - - q.task_done() - else: - # Sentinel object, terminate the thread. - q.task_done() - break - - -class PageBaker(object): - def __init__(self, app, out_dir, force=False, copy_assets=True): - self.app = app - self.out_dir = out_dir - self.force = force - self.copy_assets = copy_assets - self.site_root = app.config.get('site/root') - self.pretty_urls = app.config.get('site/pretty_urls') - self._writer_queue = queue.Queue() - self._writer = threading.Thread( - name='PageSerializer', - target=_text_writer, - args=(self._writer_queue,)) - self._writer.start() - - def shutdown(self): - self._writer_queue.put_nowait(None) - self._writer.join() - - def getOutputPath(self, uri, pretty_urls): - uri_root, uri_path = split_uri(self.app, uri) - - bake_path = [self.out_dir] - decoded_uri = urllib.parse.unquote(uri_path) - if pretty_urls: - bake_path.append(decoded_uri) - bake_path.append('index.html') - elif decoded_uri == '': - bake_path.append('index.html') - else: - bake_path.append(decoded_uri) - - return os.path.normpath(os.path.join(*bake_path)) - - def bake(self, qualified_page, prev_entry, dirty_source_names, - generator_name=None): - # Start baking the sub-pages. - cur_sub = 1 - has_more_subs = True - sub_entries = [] - - while has_more_subs: - # Get the URL and path for this sub-page. - sub_uri = qualified_page.getUri(cur_sub) - logger.debug("Baking '%s' [%d]..." % (sub_uri, cur_sub)) - pretty_urls = qualified_page.config.get('pretty_urls', - self.pretty_urls) - out_path = self.getOutputPath(sub_uri, pretty_urls) - - # Create the sub-entry for the bake record. - sub_entry = SubPageBakeInfo(sub_uri, out_path) - sub_entries.append(sub_entry) - - # Find a corresponding sub-entry in the previous bake record. 
- prev_sub_entry = None - if prev_entry: - try: - prev_sub_entry = prev_entry.getSub(cur_sub) - except IndexError: - pass - - # Figure out if we need to invalidate or force anything. - force_this_sub, invalidate_formatting = _compute_force_flags( - prev_sub_entry, sub_entry, dirty_source_names) - force_this_sub = force_this_sub or self.force - - # Check for up-to-date outputs. - do_bake = True - if not force_this_sub: - try: - in_path_time = qualified_page.path_mtime - out_path_time = os.path.getmtime(out_path) - if out_path_time >= in_path_time: - do_bake = False - except OSError: - # File doesn't exist, we'll need to bake. - pass - - # If this page didn't bake because it's already up-to-date. - # Keep trying for as many subs as we know this page has. - if not do_bake: - sub_entry.render_info = prev_sub_entry.copyRenderInfo() - sub_entry.flags = SubPageBakeInfo.FLAG_NONE - - if prev_entry.num_subs >= cur_sub + 1: - cur_sub += 1 - has_more_subs = True - logger.debug(" %s is up to date, skipping to next " - "sub-page." % out_path) - continue - - logger.debug(" %s is up to date, skipping bake." % out_path) - break - - # All good, proceed. - try: - if invalidate_formatting: - cache_key = sub_uri - self.app.env.rendered_segments_repository.invalidate( - cache_key) - sub_entry.flags |= \ - SubPageBakeInfo.FLAG_FORMATTING_INVALIDATED - - logger.debug(" p%d -> %s" % (cur_sub, out_path)) - rp = self._bakeSingle(qualified_page, cur_sub, out_path, - generator_name) - except Exception as ex: - logger.exception(ex) - page_rel_path = os.path.relpath(qualified_page.path, - self.app.root_dir) - raise BakingError("%s: error baking '%s'." % - (page_rel_path, sub_uri)) from ex - - # Record what we did. - sub_entry.flags |= SubPageBakeInfo.FLAG_BAKED - sub_entry.render_info = rp.copyRenderInfo() - - # Copy page assets. 
- if (cur_sub == 1 and self.copy_assets and - sub_entry.anyPass(lambda p: p.used_assets)): - if pretty_urls: - out_assets_dir = os.path.dirname(out_path) - else: - out_assets_dir, out_name = os.path.split(out_path) - if sub_uri != self.site_root: - out_name_noext, _ = os.path.splitext(out_name) - out_assets_dir = os.path.join(out_assets_dir, - out_name_noext) - - logger.debug("Copying page assets to: %s" % out_assets_dir) - _ensure_dir_exists(out_assets_dir) - try: - qualified_page.source.buildAssetor(qualified_page, sub_uri).copyAssets(out_assets_dir) - except Exception as ex: - page_rel_path = os.path.relpath(qualified_page.path, - self.app.root_dir) - raise BakingError("%s: error deploying page assets." % - page_rel_path) from ex - - # Figure out if we have more work. - has_more_subs = False - if sub_entry.anyPass(lambda p: p.pagination_has_more): - cur_sub += 1 - has_more_subs = True - - return sub_entries - - def _bakeSingle(self, qp, num, out_path, - generator_name=None): - ctx = PageRenderingContext(qp, page_num=num) - if qp.route.is_generator_route: - qp.route.generator.prepareRenderContext(ctx) - - with self.app.env.timerScope("PageRender"): - rp = render_page(ctx) - - with self.app.env.timerScope("PageSerialize"): - self._writer_queue.put_nowait((out_path, rp.content)) - - return rp - - -def _compute_force_flags(prev_sub_entry, sub_entry, dirty_source_names): - # Figure out what to do with this page. - force_this_sub = False - invalidate_formatting = False - sub_uri = sub_entry.out_uri - if (prev_sub_entry and - (prev_sub_entry.was_baked_successfully or - prev_sub_entry.was_clean)): - # If the current page is known to use pages from other sources, - # see if any of those got baked, or are going to be baked for - # some reason. If so, we need to bake this one too. - # (this happens for instance with the main page of a blog). 
- dirty_for_this, invalidated_render_passes = ( - _get_dirty_source_names_and_render_passes( - prev_sub_entry, dirty_source_names)) - if len(invalidated_render_passes) > 0: - logger.debug( - "'%s' is known to use sources %s, which have " - "items that got (re)baked. Will force bake this " - "page. " % (sub_uri, dirty_for_this)) - sub_entry.flags |= \ - SubPageBakeInfo.FLAG_FORCED_BY_SOURCE - force_this_sub = True - - if PASS_FORMATTING in invalidated_render_passes: - logger.debug( - "Will invalidate cached formatting for '%s' " - "since sources were using during that pass." - % sub_uri) - invalidate_formatting = True - elif (prev_sub_entry and - prev_sub_entry.errors): - # Previous bake failed. We'll have to bake it again. - logger.debug( - "Previous record entry indicates baking failed for " - "'%s'. Will bake it again." % sub_uri) - sub_entry.flags |= \ - SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS - force_this_sub = True - elif not prev_sub_entry: - # No previous record. We'll have to bake it. - logger.debug("No previous record entry found for '%s'. Will " - "force bake it." % sub_uri) - sub_entry.flags |= \ - SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS - force_this_sub = True - - return force_this_sub, invalidate_formatting - - -def _get_dirty_source_names_and_render_passes(sub_entry, dirty_source_names): - dirty_for_this = set() - invalidated_render_passes = set() - for p, pinfo in enumerate(sub_entry.render_info): - if pinfo: - for src_name in pinfo.used_source_names: - is_dirty = (src_name in dirty_source_names) - if is_dirty: - invalidated_render_passes.add(p) - dirty_for_this.add(src_name) - break - return dirty_for_this, invalidated_render_passes - - -def _ensure_dir_exists(path): - try: - os.makedirs(path, mode=0o755, exist_ok=True) - except OSError: - # In a multiprocess environment, several process may very - # occasionally try to create the same directory at the same time. 
- # Let's ignore any error and if something's really wrong (like file - # acces permissions or whatever), then it will more legitimately fail - # just after this when we try to write files. - pass - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/baking/worker.py --- a/piecrust/baking/worker.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/baking/worker.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,13 +1,10 @@ import time import logging -from piecrust.app import PieCrust, apply_variant_and_values -from piecrust.baking.records import BakeRecord, _get_transition_key -from piecrust.baking.single import PageBaker, BakingError -from piecrust.environment import AbortedSourceUseError -from piecrust.rendering import ( - QualifiedPage, PageRenderingContext, render_page_segments) -from piecrust.routing import create_route_metadata -from piecrust.sources.base import PageFactory +from piecrust.pipelines.base import ( + PipelineManager, PipelineJobRunContext, + get_pipeline_name_for_source) +from piecrust.pipelines.records import ( + MultiRecordHistory, MultiRecord, load_records) from piecrust.workerpool import IWorker @@ -16,227 +13,96 @@ class BakeWorkerContext(object): def __init__(self, appfactory, out_dir, *, - force=False, previous_record_path=None): + force=False, previous_records_path=None, + allowed_pipelines=None, forbidden_pipelines=None): self.appfactory = appfactory self.out_dir = out_dir self.force = force - self.previous_record_path = previous_record_path - self.app = None - self.previous_record = None - self.previous_record_index = None + self.previous_records_path = previous_records_path + self.allowed_pipelines = allowed_pipelines + self.forbidden_pipelines = forbidden_pipelines class BakeWorker(IWorker): def __init__(self, ctx): self.ctx = ctx - self.work_start_time = time.perf_counter() + self.app = None + self.stats = None + self.previous_records = None + self._work_start_time = time.perf_counter() + self._sources = {} + self._ppctx = None def initialize(self): # 
Create the app local to this worker. app = self.ctx.appfactory.create() app.config.set('baker/is_baking', True) app.config.set('baker/worker_id', self.wid) - app.env.base_asset_url_format = '%uri%' + app.config.set('site/asset_url_format', '%page_uri%/%filename%') + app.env.fs_cache_only_for_main_page = True - app.env.registerTimer("BakeWorker_%d_Total" % self.wid) - app.env.registerTimer("BakeWorkerInit") - app.env.registerTimer("JobReceive") - app.env.registerCounter("SourceUseAbortions") - app.env.registerManifest("LoadJobs") - app.env.registerManifest("RenderJobs") - app.env.registerManifest("BakeJobs") - self.ctx.app = app + + stats = app.env.stats + stats.registerTimer("BakeWorker_%d_Total" % self.wid) + stats.registerTimer("BakeWorkerInit") + + self.app = app + self.stats = stats # Load previous record - if self.ctx.previous_record_path: - self.ctx.previous_record = BakeRecord.load( - self.ctx.previous_record_path) - self.ctx.previous_record_index = {} - for e in self.ctx.previous_record.entries: - key = _get_transition_key(e.path, e.extra_key) - self.ctx.previous_record_index[key] = e + if self.ctx.previous_records_path: + previous_records = load_records(self.ctx.previous_records_path) + else: + previous_records = MultiRecord() + self.previous_records = previous_records - # Create the job handlers. - job_handlers = { - JOB_LOAD: LoadJobHandler(self.ctx), - JOB_RENDER_FIRST: RenderFirstSubJobHandler(self.ctx), - JOB_BAKE: BakeJobHandler(self.ctx)} - for jt, jh in job_handlers.items(): - app.env.registerTimer(type(jh).__name__) - self.job_handlers = job_handlers + # Create the pipelines. 
+ self.ppmngr = PipelineManager( + app, self.ctx.out_dir, + worker_id=self.wid, force=self.ctx.force) + ok_pp = self.ctx.allowed_pipelines + nok_pp = self.ctx.forbidden_pipelines + for src in app.sources: + pname = get_pipeline_name_for_source(src) + if ok_pp is not None and pname not in ok_pp: + continue + if nok_pp is not None and pname in nok_pp: + continue - app.env.stepTimerSince("BakeWorkerInit", self.work_start_time) + self.ppmngr.createPipeline(src) + + stats.registerTimer("PipelineJobs_%s" % pname, + raise_if_registered=False) + + stats.stepTimerSince("BakeWorkerInit", self._work_start_time) def process(self, job): - handler = self.job_handlers[job['type']] - with self.ctx.app.env.timerScope(type(handler).__name__): - return handler.handleJob(job['job']) - - def getReport(self, pool_reports): - self.ctx.app.env.stepTimerSince("BakeWorker_%d_Total" % self.wid, - self.work_start_time) - data = self.ctx.app.env.getStats() - data.timers.update(pool_reports) - return { - 'type': 'stats', - 'data': data} + source_name, item_spec = job['job_spec'] + logger.debug("Received job: %s@%s" % (source_name, item_spec)) - def shutdown(self): - for jh in self.job_handlers.values(): - jh.shutdown() - - -JOB_LOAD, JOB_RENDER_FIRST, JOB_BAKE = range(0, 3) - + # Run the job! + job_start = time.perf_counter() + pp = self.ppmngr.getPipeline(source_name) + runctx = PipelineJobRunContext(job, pp.record_name, + self.previous_records) + ppres = { + 'item_spec': item_spec + } + pp.run(job, runctx, ppres) -class JobHandler(object): - def __init__(self, ctx): - self.ctx = ctx + # Log time spent in this pipeline. 
+ self.stats.stepTimerSince("PipelineJobs_%s" % pp.PIPELINE_NAME, + job_start) + + return ppres - @property - def app(self): - return self.ctx.app - - def handleJob(self, job): - raise NotImplementedError() + def getStats(self): + stats = self.app.env.stats + stats.stepTimerSince("BakeWorker_%d_Total" % self.wid, + self._work_start_time) + return stats def shutdown(self): - pass - - -def _get_errors(ex): - errors = [] - while ex is not None: - errors.append(str(ex)) - ex = ex.__cause__ - return errors - - -def save_factory(fac): - return { - 'source_name': fac.source.name, - 'rel_path': fac.rel_path, - 'metadata': fac.metadata} - - -def load_factory(app, info): - source = app.getSource(info['source_name']) - return PageFactory(source, info['rel_path'], info['metadata']) - - -class LoadJobHandler(JobHandler): - def handleJob(self, job): - # Just make sure the page has been cached. - fac = load_factory(self.app, job) - logger.debug("Loading page: %s" % fac.ref_spec) - self.app.env.addManifestEntry('LoadJobs', fac.ref_spec) - result = { - 'source_name': fac.source.name, - 'path': fac.path, - 'config': None, - 'timestamp': None, - 'errors': None} - try: - page = fac.buildPage() - page._load() - result['config'] = page.config.getAll() - result['timestamp'] = page.datetime.timestamp() - except Exception as ex: - logger.debug("Got loading error. Sending it to master.") - result['errors'] = _get_errors(ex) - if self.ctx.app.debug: - logger.exception(ex) - return result - - -class RenderFirstSubJobHandler(JobHandler): - def handleJob(self, job): - # Render the segments for the first sub-page of this page. 
- fac = load_factory(self.app, job['factory_info']) - self.app.env.addManifestEntry('RenderJobs', fac.ref_spec) - - route_index = job['route_index'] - route = self.app.routes[route_index] - - page = fac.buildPage() - route_metadata = create_route_metadata(page) - qp = QualifiedPage(page, route, route_metadata) - ctx = PageRenderingContext(qp) - self.app.env.abort_source_use = True + for src, pp in self._sources.values(): + pp.shutdown(self._ppctx) - result = { - 'path': fac.path, - 'aborted': False, - 'errors': None} - logger.debug("Preparing page: %s" % fac.ref_spec) - try: - render_page_segments(ctx) - except AbortedSourceUseError: - logger.debug("Page %s was aborted." % fac.ref_spec) - self.app.env.stepCounter("SourceUseAbortions") - result['aborted'] = True - except Exception as ex: - logger.debug("Got rendering error. Sending it to master.") - result['errors'] = _get_errors(ex) - if self.ctx.app.debug: - logger.exception(ex) - finally: - self.app.env.abort_source_use = False - return result - - -class BakeJobHandler(JobHandler): - def __init__(self, ctx): - super(BakeJobHandler, self).__init__(ctx) - self.page_baker = PageBaker(ctx.app, ctx.out_dir, ctx.force) - - def shutdown(self): - self.page_baker.shutdown() - - def handleJob(self, job): - # Actually bake the page and all its sub-pages to the output folder. 
- fac = load_factory(self.app, job['factory_info']) - self.app.env.addManifestEntry('BakeJobs', fac.ref_spec) - - route_index = job['route_index'] - route_metadata = job['route_metadata'] - route = self.app.routes[route_index] - - gen_name = job['generator_name'] - gen_key = job['generator_record_key'] - dirty_source_names = job['dirty_source_names'] - - page = fac.buildPage() - qp = QualifiedPage(page, route, route_metadata) - - result = { - 'path': fac.path, - 'generator_name': gen_name, - 'generator_record_key': gen_key, - 'sub_entries': None, - 'errors': None} - - if job.get('needs_config', False): - result['config'] = page.config.getAll() - - previous_entry = None - if self.ctx.previous_record_index is not None: - key = _get_transition_key(fac.path, gen_key) - previous_entry = self.ctx.previous_record_index.get(key) - - logger.debug("Baking page: %s" % fac.ref_spec) - logger.debug("With route metadata: %s" % route_metadata) - try: - sub_entries = self.page_baker.bake( - qp, previous_entry, dirty_source_names, gen_name) - result['sub_entries'] = sub_entries - - except Exception as ex: - logger.debug("Got baking error. 
Sending it to master.") - result['errors'] = _get_errors(ex) - result['sub_entries'] = [] - if self.ctx.app.debug: - logger.exception(ex) - - return result - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/cache.py --- a/piecrust/cache.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/cache.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,11 +1,9 @@ import os import os.path -import json import shutil -import codecs +import pickle import hashlib import logging -import collections import repoze.lru @@ -85,19 +83,23 @@ return os.path.isfile(cache_path) def read(self, path): - cache_path = self.getCachePath(path) - logger.debug("Reading cache: %s" % cache_path) - with codecs.open(cache_path, 'r', 'utf-8') as fp: + with self.openRead(path, mode='r', encoding='utf8') as fp: return fp.read() + def openRead(self, path, mode='r', encoding=None): + cache_path = self.getCachePath(path) + return open(cache_path, mode=mode, encoding=encoding) + def write(self, path, content): + with self.openWrite(path, mode='w', encoding='utf8') as fp: + fp.write(content) + + def openWrite(self, path, mode='w', encoding=None): cache_path = self.getCachePath(path) cache_dir = os.path.dirname(cache_path) if not os.path.isdir(cache_dir): os.makedirs(cache_dir, 0o755) - logger.debug("Writing cache: %s" % cache_path) - with codecs.open(cache_path, 'w', 'utf-8') as fp: - fp.write(content) + return open(cache_path, mode=mode, encoding=encoding) def getCachePath(self, path): if path.startswith('.'): @@ -155,7 +157,7 @@ class MemCache(object): """ Simple memory cache. It can be backed by a simple file-system - cache, but items need to be JSON-serializable to do this. + cache, but items need to be pickle-able to do this. 
""" def __init__(self, size=2048): self.cache = repoze.lru.LRUCache(size) @@ -182,8 +184,8 @@ self.cache.put(key, item) if self.fs_cache and save_to_fs: fs_key = _make_fs_cache_key(key) - item_raw = json.dumps(item) - self.fs_cache.write(fs_key, item_raw) + with self.fs_cache.openWrite(fs_key, mode='wb') as fp: + pickle.dump(item, fp, pickle.HIGHEST_PROTOCOL) def get(self, key, item_maker, fs_cache_time=None, save_to_fs=True): self._last_access_hit = True @@ -192,16 +194,19 @@ self._hits += 1 return item - if (self.fs_cache is not None and - fs_cache_time is not None): + if self.fs_cache is not None: + if fs_cache_time is None: + raise ValueError( + "No file-system cache time was given for '%s'. " + "This would result in degraded performance." % key) + # Try first from the file-system cache. fs_key = _make_fs_cache_key(key) if (fs_key not in self._invalidated_fs_items and self.fs_cache.isValid(fs_key, fs_cache_time)): - logger.debug("'%s' found in file-system cache." % - key) - item_raw = self.fs_cache.read(fs_key) - item = json.loads(item_raw) + logger.debug("'%s' found in file-system cache." % key) + with self.fs_cache.openRead(fs_key, mode='rb') as fp: + item = pickle.load(fp) self.cache.put(key, item) self._hits += 1 return item @@ -216,9 +221,8 @@ # Save to the file-system if needed. 
if self.fs_cache is not None and save_to_fs: - item_raw = json.dumps(item) - self.fs_cache.write(fs_key, item_raw) + with self.fs_cache.openWrite(fs_key, mode='wb') as fp: + pickle.dump(item, fp, pickle.HIGHEST_PROTOCOL) return item - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/base.py --- a/piecrust/commands/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -8,12 +8,11 @@ class CommandContext(object): - def __init__(self, app, parser, args): + def __init__(self, appfactory, app, parser, args): + self.appfactory = appfactory self.app = app self.parser = parser self.args = args - self.config_variant = None - self.config_values = None class ChefCommand(object): @@ -27,8 +26,9 @@ raise NotImplementedError() def run(self, ctx): - raise NotImplementedError("Command '%s' doesn't implement the `run` " - "method." % type(self)) + raise NotImplementedError( + "Command '%s' doesn't implement the `run` " + "method." % type(self)) def checkedRun(self, ctx): if ctx.app.root_dir is None and self.requires_website: @@ -83,8 +83,9 @@ return [(n, d) for (n, d, e) in self._topic_providers] def setupParser(self, parser, app): - parser.add_argument('topic', nargs='?', - help="The command name or topic on which to get help.") + parser.add_argument( + 'topic', nargs='?', + help="The command name or topic on which to get help.") extensions = self.getExtensions(app) for ext in extensions: @@ -106,8 +107,8 @@ for c in ctx.app.plugin_loader.getCommands(): if c.name == topic: fake = argparse.ArgumentParser( - prog='%s %s' % (ctx.parser.prog, c.name), - description=c.description) + prog='%s %s' % (ctx.parser.prog, c.name), + description=c.description) c.setupParser(fake, ctx.app) fake.print_help() return 0 diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/admin.py --- a/piecrust/commands/builtin/admin.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/admin.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,7 
+1,7 @@ import os import os.path import logging -from piecrust import CACHE_DIR +from piecrust import CONFIG_PATH from piecrust.commands.base import ChefCommand from piecrust.pathutil import SiteNotFoundError @@ -20,34 +20,9 @@ subparsers = parser.add_subparsers() p = subparsers.add_parser( - 'init', - help="Creates a new administration panel website.") - p.set_defaults(sub_func=self._initFoodTruck) - - p = subparsers.add_parser( - 'genpass', - help=("Generates the hashed password for use as an " - "admin password")) - p.add_argument('password', help="The password to hash.") - p.set_defaults(sub_func=self._generatePassword) - - p = subparsers.add_parser( - 'run', - help="Runs the administrative panel website.") - p.add_argument( - '-p', '--port', - help="The port for the administrative panel website.", - default=8090) - p.add_argument( - '-a', '--address', - help="The host for the administrative panel website.", - default='localhost') - p.add_argument( - '--no-assets', - help="Don't process and monitor the asset folder(s).", - dest='monitor_assets', - action='store_false') - p.set_defaults(sub_func=self._runFoodTruck) + 'init', + help="Creates a new administration panel website.") + p.set_defaults(sub_func=self._initAdminSite) def checkedRun(self, ctx): if ctx.app.root_dir is None: @@ -58,37 +33,8 @@ return return ctx.args.sub_func(ctx) - def _runFoodTruck(self, ctx): - # See `_run_sse_check` in `piecrust.serving.wrappers` for an explanation - # of this check. 
- if (ctx.args.monitor_assets and ( - not ctx.args.debug or - os.environ.get('WERKZEUG_RUN_MAIN') == 'true')): - from piecrust.app import PieCrustFactory - from piecrust.serving.procloop import ProcessingLoop - appfactory = PieCrustFactory( - ctx.app.root_dir, - cache=ctx.app.cache.enabled, - cache_key=ctx.app.cache_key, - config_variant=ctx.config_variant, - config_values=ctx.config_values, - debug=ctx.app.debug, - theme_site=ctx.app.theme_site) - out_dir = os.path.join(ctx.app.root_dir, CACHE_DIR, 'foodtruck', 'server') - proc_loop = ProcessingLoop(appfactory, out_dir) - proc_loop.start() - - es = { - 'FOODTRUCK_CMDLINE_MODE': True, - 'FOODTRUCK_ROOT': ctx.app.root_dir} - from piecrust.admin.main import run_foodtruck - run_foodtruck( - host=ctx.args.address, - port=ctx.args.port, - debug=ctx.args.debug, - extra_settings=es) - - def _initFoodTruck(self, ctx): + def _initAdminSite(self, ctx): + import io import getpass from piecrust.admin import bcryptfallback as bcrypt @@ -97,9 +43,10 @@ admin_password = getpass.getpass("Admin password: ") if not admin_password: logger.warning("No administration password set!") - logger.warning("Don't make this instance of FoodTruck public.") + logger.warning("Don't make this instance of the PieCrust " + "administration panel public.") logger.info("You can later set an admin password by editing " - "the `foodtruck.yml` file and using the " + "the `admin.cfg` file and using the " "`chef admin genpass` command.") else: binpw = admin_password.encode('utf8') @@ -107,24 +54,23 @@ admin_password = hashpw ft_config = """ -security: +admin: + secret_key: %(secret_key)s username: %(username)s # You can generate another hashed password with `chef admin genpass`. 
password: %(password)s """ ft_config = ft_config % { - 'username': admin_username, - 'password': admin_password - } - with open('foodtruck.yml', 'w', encoding='utf8') as fp: - fp.write(ft_config) + 'secret_key': secret_key, + 'username': admin_username, + 'password': admin_password + } - flask_config = """ -SECRET_KEY = %(secret_key)s -""" - flask_config = flask_config % {'secret_key': secret_key} - with open('app.cfg', 'w', encoding='utf8') as fp: - fp.write(flask_config) + config_path = os.path.join(ctx.app.root_dir, CONFIG_PATH) + with open(config_path, 'a+', encoding='utf8') as fp: + fp.seek(0, io.SEEK_END) + fp.write('\n') + fp.write(ft_config) def _generatePassword(self, ctx): from piecrust.admin import bcryptfallback as bcrypt diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/baking.py --- a/piecrust/commands/builtin/baking.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/baking.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,24 +1,9 @@ +import os.path import time -import os.path import logging -import hashlib -import fnmatch import datetime from colorama import Fore -from piecrust import CACHE_DIR -from piecrust.baking.baker import Baker -from piecrust.baking.records import ( - BakeRecord, BakeRecordEntry, SubPageBakeInfo) -from piecrust.chefutil import format_timed from piecrust.commands.base import ChefCommand -from piecrust.environment import ExecutionStats -from piecrust.processing.pipeline import ProcessorPipeline -from piecrust.processing.records import ( - ProcessorPipelineRecord, - FLAG_PREPARED, FLAG_PROCESSED, FLAG_BYPASSED_STRUCTURED_PROCESSING, - FLAG_COLLAPSED_FROM_LAST_RUN) -from piecrust.rendering import ( - PASS_FORMATTING, PASS_RENDERING) logger = logging.getLogger(__name__) @@ -32,399 +17,353 @@ def setupParser(self, parser, app): parser.add_argument( - '-o', '--output', - help="The directory to put all the baked HTML files into " - "(defaults to `_counter`)") + '-o', '--output', + help="The directory to put all the 
baked HTML files into " + "(defaults to `_counter`)") + parser.add_argument( + '-f', '--force', + help="Force re-baking the entire website.", + action='store_true') parser.add_argument( - '-f', '--force', - help="Force re-baking the entire website.", - action='store_true') + '-p', '--pipelines', + help="The pipelines to run.", + action='append') parser.add_argument( - '-w', '--workers', - help="The number of worker processes to spawn.", - type=int, default=-1) + '-w', '--workers', + help="The number of worker processes to spawn.", + type=int, default=-1) parser.add_argument( - '--batch-size', - help="The number of jobs per batch.", - type=int, default=-1) + '--batch-size', + help="The number of jobs per batch.", + type=int, default=-1) parser.add_argument( - '--assets-only', - help="Only bake the assets (don't bake the web pages).", - action='store_true') + '--assets-only', + help="Only bake the assets (don't bake the web pages).", + action='store_true') parser.add_argument( - '--html-only', - help="Only bake the pages (don't run the asset pipeline).", - action='store_true') + '--html-only', + help="Only bake the pages (don't run the asset pipeline).", + action='store_true') parser.add_argument( - '--show-stats', - help="Show detailed information about the bake.", - action='store_true') + '--show-stats', + help="Show detailed information about the bake.", + action='store_true') + parser.add_argument( + '--profile', + help="Run the bake several times, for profiling.", + type=int, default=-1) def run(self, ctx): + from piecrust.chefutil import format_timed + from piecrust.environment import ExecutionStats + out_dir = (ctx.args.output or os.path.join(ctx.app.root_dir, '_counter')) success = True - ctx.stats = {} + avg_stats = ExecutionStats() + avg_stats.registerTimer('Total') start_time = time.perf_counter() - try: - # Bake the site sources. 
- if not ctx.args.assets_only: - success = success & self._bakeSources(ctx, out_dir) + + num_iter = 1 + if ctx.args.profile > 0: + num_iter = ctx.args.profile - # Bake the assets. - if not ctx.args.html_only: - success = success & self._bakeAssets(ctx, out_dir) + for i in range(num_iter): + iter_start_time = time.perf_counter() + if num_iter > 1: + import gc + gc.collect() + logger.info("---- %d/%d ----" % (i + 1, num_iter)) - # Show merged stats. - if ctx.args.show_stats: - logger.info("-------------------") - logger.info("Timing information:") - _show_stats(ctx.stats) + try: + records = self._doBake(ctx, out_dir) + except Exception as ex: + if ctx.app.debug: + logger.exception(ex) + else: + logger.error(str(ex)) + return 1 + + success = success and records.success + avg_stats.mergeStats(records.stats) + avg_stats.stepTimerSince('Total', iter_start_time) - # All done. - logger.info('-------------------------') - logger.info(format_timed(start_time, 'done baking')) - return 0 if success else 1 - except Exception as ex: - if ctx.app.debug: - logger.exception(ex) - else: - logger.error(str(ex)) - return 1 + # Show merged stats. + if ctx.args.show_stats: + if num_iter > 1: + _average_stats(avg_stats, num_iter) + + logger.info("-------------------") + logger.info("Timing information:") + _show_stats(avg_stats) - def _bakeSources(self, ctx, out_dir): + # All done. 
+ logger.info('-------------------------') + logger.info(format_timed(start_time, 'done baking')) + return 0 if success else 1 + + def _doBake(self, ctx, out_dir): + from piecrust.baking.baker import Baker + if ctx.args.workers > 0: ctx.app.config.set('baker/workers', ctx.args.workers) if ctx.args.batch_size > 0: ctx.app.config.set('baker/batch_size', ctx.args.batch_size) - baker = Baker( - ctx.app, out_dir, - force=ctx.args.force, - applied_config_variant=ctx.config_variant, - applied_config_values=ctx.config_values) - record = baker.bake() - _merge_stats(record.stats, ctx.stats) - return record.success - - def _bakeAssets(self, ctx, out_dir): - proc = ProcessorPipeline( - ctx.app, out_dir, - force=ctx.args.force, - applied_config_variant=ctx.config_variant, - applied_config_values=ctx.config_values) - record = proc.run() - _merge_stats(record.stats, ctx.stats) - return record.success - - -def _merge_stats(source, target): - if source is None: - return - - for name, val in source.items(): - if name not in target: - target[name] = ExecutionStats() - target[name].mergeStats(val) - -def _show_stats(stats, *, full=False): - indent = ' ' - for name in sorted(stats.keys()): - logger.info('%s:' % name) - s = stats[name] - - logger.info(' Timers:') - for name, val in sorted(s.timers.items(), key=lambda i: i[1], - reverse=True): - val_str = '%8.1f s' % val - logger.info( - "%s[%s%s%s] %s" % - (indent, Fore.GREEN, val_str, Fore.RESET, name)) + allowed_pipelines = None + forbidden_pipelines = None + if ctx.args.html_only: + forbidden_pipelines = ['asset'] + elif ctx.args.assets_only: + allowed_pipelines = ['asset'] + elif ctx.args.pipelines: + if allowed_pipelines or forbidden_pipelines: + raise Exception( + "Can't specify `--html-only` or `--assets-only` with " + "`--pipelines`.") + allowed_pipelines = [] + forbidden_pipelines = [] + for p in ctx.args.pipelines: + if p[0] == '-': + forbidden_pipelines.append(p) + else: + allowed_pipelines.append(p) + if not 
allowed_pipelines: + allowed_pipelines = None + if not forbidden_pipelines: + forbidden_pipelines = None - logger.info(' Counters:') - for name in sorted(s.counters.keys()): - val_str = '%8d ' % s.counters[name] - logger.info( - "%s[%s%s%s] %s" % - (indent, Fore.GREEN, val_str, Fore.RESET, name)) + baker = Baker( + ctx.appfactory, ctx.app, out_dir, + force=ctx.args.force, + allowed_pipelines=allowed_pipelines, + forbidden_pipelines=forbidden_pipelines) + records = baker.bake() - logger.info(' Manifests:') - for name in sorted(s.manifests.keys()): - val = s.manifests[name] - logger.info( - "%s[%s%s%s] [%d entries]" % - (indent, Fore.CYAN, name, Fore.RESET, len(val))) - if full: - for v in val: - logger.info("%s - %s" % (indent, v)) + return records class ShowRecordCommand(ChefCommand): def __init__(self): super(ShowRecordCommand, self).__init__() - self.name = 'showrecord' - self.description = ("Shows the bake record for a given output " + self.name = 'showrecords' + self.description = ("Shows the bake records for a given output " "directory.") def setupParser(self, parser, app): parser.add_argument( - '-o', '--output', - help="The output directory for which to show the bake record " - "(defaults to `_counter`)", - nargs='?') + '-o', '--output', + help="The output directory for which to show the bake records " + "(defaults to `_counter`)", + nargs='?') + parser.add_argument( + '-i', '--in-path', + help="A pattern that will be used to filter the relative path " + "of entries to show.") parser.add_argument( - '-p', '--path', - help="A pattern that will be used to filter the relative path " - "of entries to show.") + '-t', '--out-path', + help="A pattern that will be used to filter the output path " + "of entries to show.") parser.add_argument( - '-t', '--out', - help="A pattern that will be used to filter the output path " - "of entries to show.") + '--fails', + action='store_true', + help="Only show record entries for failures.") + parser.add_argument( + '--last', + 
type=int, + default=0, + help="Show the last Nth bake records.") parser.add_argument( - '--last', - type=int, - default=0, - help="Show the last Nth bake record.") + '--records', + help="Load the specified records file.") parser.add_argument( - '--html-only', - action='store_true', - help="Only show records for pages (not from the asset " - "pipeline).") + '--html-only', + action='store_true', + help="Only show records for pages (not from the asset " + "pipeline).") parser.add_argument( - '--assets-only', - action='store_true', - help="Only show records for assets (not from pages).") + '--assets-only', + action='store_true', + help="Only show records for assets (not from pages).") parser.add_argument( - '--show-stats', - action='store_true', - help="Show stats from the record.") + '-p', '--pipelines', + nargs='*', + help="Only show records for the given pipeline(s).") parser.add_argument( - '--show-manifest', - help="Show manifest entries from the record.") + '--show-stats', + action='store_true', + help="Show stats from the records.") + parser.add_argument( + '--show-manifest', + help="Show manifest entries from the records.") def run(self, ctx): - out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter') - record_id = hashlib.md5(out_dir.encode('utf8')).hexdigest() - suffix = '' if ctx.args.last == 0 else '.%d' % ctx.args.last - record_name = '%s%s.record' % (record_id, suffix) + import fnmatch + from piecrust.baking.baker import get_bake_records_path + from piecrust.pipelines.records import load_records - pattern = None - if ctx.args.path: - pattern = '*%s*' % ctx.args.path.strip('*') + records_path = ctx.args.records + if records_path is None: + out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, + '_counter') + suffix = '' if ctx.args.last == 0 else '.%d' % ctx.args.last + records_path = get_bake_records_path(ctx.app, out_dir, + suffix=suffix) + logger.info("Bake records for output: %s" % out_dir) + else: + logger.info("Bake records from: 
%s" % records_path) + + records = load_records(records_path, True) + if records.invalidated: + raise Exception( + "The bake records were saved by a previous version of " + "PieCrust and can't be shown.") + + in_pattern = None + if ctx.args.in_path: + in_pattern = '*%s*' % ctx.args.in_path.strip('*') out_pattern = None - if ctx.args.out: - out_pattern = '*%s*' % ctx.args.out.strip('*') + if ctx.args.out_path: + out_pattern = '*%s*' % ctx.args.out_path.strip('*') + + pipelines = ctx.args.pipelines + if pipelines is None: + if ctx.args.assets_only: + pipelines = ['asset'] + if ctx.args.html_only: + pipelines = ['page'] + + logger.info("Status: %s" % ('SUCCESS' if records.success + else 'FAILURE')) + logger.info("Date/time: %s" % + datetime.datetime.fromtimestamp(records.bake_time)) + logger.info("Incremental count: %d" % records.incremental_count) + logger.info("Versions: %s/%s" % (records._app_version, + records._record_version)) + logger.info("") if not ctx.args.show_stats and not ctx.args.show_manifest: - if not ctx.args.assets_only: - self._showBakeRecord( - ctx, record_name, pattern, out_pattern) - if not ctx.args.html_only: - self._showProcessingRecord( - ctx, record_name, pattern, out_pattern) - return + for rec in sorted(records.records, key=lambda r: r.name): + if ctx.args.fails and rec.success: + logger.debug( + "Ignoring record '%s' because it was successful, " + "and `--fail` was passed." % rec.name) + continue + + ppname = rec.name[rec.name.index('@') + 1:] + if pipelines is not None and ppname not in pipelines: + logging.debug( + "Ignoring record '%s' because it was created by " + "pipeline '%s', which isn't listed in " + "`--pipelines`." 
% (rec.name, ppname)) + continue + + entries_to_show = [] - stats = {} - bake_rec = self._getBakeRecord(ctx, record_name) - if bake_rec: - _merge_stats(bake_rec.stats, stats) - proc_rec = self._getProcessingRecord(ctx, record_name) - if proc_rec: - _merge_stats(proc_rec.stats, stats) + for e in rec.getEntries(): + if ctx.args.fails and e.success: + continue + if in_pattern and not fnmatch.fnmatch(e.item_spec, + in_pattern): + continue + if out_pattern and not any( + [fnmatch.fnmatch(op, out_pattern) + for op in e.getAllOutputPaths()]): + continue + entries_to_show.append(e) + if entries_to_show: + logger.info("Record: %s" % rec.name) + logger.info("Status: %s" % ('SUCCESS' if rec.success + else 'FAILURE')) + logger.info("User Data:") + if not rec.user_data: + logger.info(" ") + else: + for k, v in rec.user_data.items(): + logger.info(" %s: %s" % (k, v)) + + for e in entries_to_show: + _print_record_entry(e) + logger.info("") + + stats = records.stats if ctx.args.show_stats: - _show_stats(stats, full=False) + _show_stats(stats) if ctx.args.show_manifest: - for name in sorted(stats.keys()): - logger.info('%s:' % name) - s = stats[name] - for name in sorted(s.manifests.keys()): - if ctx.args.show_manifest.lower() in name.lower(): - val = s.manifests[name] - logger.info( - " [%s%s%s] [%d entries]" % - (Fore.CYAN, name, Fore.RESET, len(val))) - for v in val: - logger.info(" - %s" % v) - + for name in sorted(stats.manifests.keys()): + if ctx.args.show_manifest.lower() in name.lower(): + val = stats.manifests[name] + logger.info( + " [%s%s%s] [%d entries]" % + (Fore.CYAN, name, Fore.RESET, len(val))) + for v in val: + logger.info(" - %s" % v) - def _getBakeRecord(self, ctx, record_name): - record_cache = ctx.app.cache.getCache('baker') - if not record_cache.has(record_name): - logger.warning( - "No page bake record has been created for this output " - "path.") - return None - - record = BakeRecord.load(record_cache.getCachePath(record_name)) - return record - - def 
_showBakeRecord(self, ctx, record_name, pattern, out_pattern): - record = self._getBakeRecord(ctx, record_name) - if record is None: - return - - logging.info("Bake record for: %s" % record.out_dir) - logging.info("From: %s" % record_name) - logging.info("Last baked: %s" % - datetime.datetime.fromtimestamp(record.bake_time)) - if record.success: - logging.info("Status: success") - else: - logging.error("Status: failed") - logging.info("Entries:") - for entry in record.entries: - if pattern and not fnmatch.fnmatch(entry.path, pattern): - continue - if out_pattern and not ( - any([o for o in entry.all_out_paths - if fnmatch.fnmatch(o, out_pattern)])): - continue - - flags = _get_flag_descriptions( - entry.flags, - { - BakeRecordEntry.FLAG_NEW: 'new', - BakeRecordEntry.FLAG_SOURCE_MODIFIED: 'modified', - BakeRecordEntry.FLAG_OVERRIDEN: 'overriden'}) - - logging.info(" - ") - - rel_path = os.path.relpath(entry.path, ctx.app.root_dir) - logging.info(" path: %s" % rel_path) - logging.info(" source: %s" % entry.source_name) - if entry.extra_key: - logging.info(" extra key: %s" % entry.extra_key) - logging.info(" flags: %s" % _join(flags)) - logging.info(" config: %s" % entry.config) - - if entry.errors: - logging.error(" errors: %s" % entry.errors) - - logging.info(" %d sub-pages:" % len(entry.subs)) - for sub in entry.subs: - sub_flags = _get_flag_descriptions( - sub.flags, - { - SubPageBakeInfo.FLAG_BAKED: 'baked', - SubPageBakeInfo.FLAG_FORCED_BY_SOURCE: - 'forced by source', - SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS: - 'forced by missing previous record entry', - SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS: - 'forced by previous errors', - SubPageBakeInfo.FLAG_FORMATTING_INVALIDATED: - 'formatting invalidated'}) - - logging.info(" - ") - logging.info(" URL: %s" % sub.out_uri) - logging.info(" path: %s" % os.path.relpath( - sub.out_path, record.out_dir)) - logging.info(" flags: %s" % _join(sub_flags)) - - pass_names = { - PASS_FORMATTING: 'formatting pass', - 
PASS_RENDERING: 'rendering pass'} - for p, ri in enumerate(sub.render_info): - logging.info(" - %s" % pass_names[p]) - if not ri: - logging.info(" no info") - continue - - logging.info(" used sources: %s" % - _join(ri.used_source_names)) - pgn_info = 'no' - if ri.used_pagination: - pgn_info = 'yes' - if ri.pagination_has_more: - pgn_info += ', has more' - logging.info(" used pagination: %s", pgn_info) - logging.info(" used assets: %s", - 'yes' if ri.used_assets else 'no') - logging.info(" other info:") - for k, v in ri._custom_info.items(): - logging.info(" - %s: %s" % (k, v)) - - if sub.errors: - logging.error(" errors: %s" % sub.errors) - - def _getProcessingRecord(self, ctx, record_name): - record_cache = ctx.app.cache.getCache('proc') - if not record_cache.has(record_name): - logger.warning( - "No asset processing record has been created for this " - "output path.") - return None - - record = ProcessorPipelineRecord.load( - record_cache.getCachePath(record_name)) - return record - - def _showProcessingRecord(self, ctx, record_name, pattern, out_pattern): - record = self._getProcessingRecord(ctx, record_name) - if record is None: - return - - logging.info("") - logging.info("Processing record for: %s" % record.out_dir) - logging.info("Last baked: %s" % - datetime.datetime.fromtimestamp(record.process_time)) - if record.success: - logging.info("Status: success") - else: - logging.error("Status: failed") - logging.info("Entries:") - for entry in record.entries: - rel_path = os.path.relpath(entry.path, ctx.app.root_dir) - if pattern and not fnmatch.fnmatch(rel_path, pattern): - continue - if out_pattern and not ( - any([o for o in entry.rel_outputs - if fnmatch.fnmatch(o, out_pattern)])): - continue - - flags = _get_flag_descriptions( - entry.flags, - { - FLAG_PREPARED: 'prepared', - FLAG_PROCESSED: 'processed', - FLAG_BYPASSED_STRUCTURED_PROCESSING: 'external', - FLAG_COLLAPSED_FROM_LAST_RUN: 'from last run'}) - - logger.info(" - ") - logger.info(" path: %s" % 
rel_path) - logger.info(" out paths: %s" % entry.rel_outputs) - logger.info(" flags: %s" % _join(flags)) - logger.info(" proc tree: %s" % _format_proc_tree( - entry.proc_tree, 14*' ')) - - if entry.errors: - logger.error(" errors: %s" % entry.errors) +def _average_stats(stats, cnt): + for name in stats.timers: + stats.timers[name] /= cnt + for name in stats.counters: + stats.counters[name] /= cnt -def _join(items, sep=', ', text_if_none='none'): - if items: - return sep.join(items) - return text_if_none +def _show_stats(stats, *, full=False): + indent = ' ' + + logger.info(' Timers:') + for name, val in sorted(stats.timers.items(), key=lambda i: i[1], + reverse=True): + val_str = '%8.1f s' % val + logger.info( + "%s[%s%s%s] %s" % + (indent, Fore.GREEN, val_str, Fore.RESET, name)) + + logger.info(' Counters:') + for name in sorted(stats.counters.keys()): + val_str = '%8d ' % stats.counters[name] + logger.info( + "%s[%s%s%s] %s" % + (indent, Fore.GREEN, val_str, Fore.RESET, name)) + + logger.info(' Manifests:') + for name in sorted(stats.manifests.keys()): + val = stats.manifests[name] + logger.info( + "%s[%s%s%s] [%d entries]" % + (indent, Fore.CYAN, name, Fore.RESET, len(val))) + if full: + for v in val: + logger.info("%s - %s" % (indent, v)) -def _get_flag_descriptions(flags, descriptions): - res = [] - for k, v in descriptions.items(): - if flags & k: - res.append(v) - return res +def _print_record_entry(e): + import pprint + import textwrap + logger.info(" - %s" % e.item_spec) + logger.info(" Outputs:") + out_paths = list(e.getAllOutputPaths()) + if out_paths: + for op in out_paths: + logger.info(" - %s" % op) + else: + logger.info(" ") -def _format_proc_tree(tree, margin='', level=0): - name, children = tree - res = '%s%s+ %s\n' % (margin if level > 0 else '', level * ' ', name) - if children: - for c in children: - res += _format_proc_tree(c, margin, level + 1) - return res + e_desc = e.describe() + for k, v in e_desc.items(): + if isinstance(v, dict): + text 
= pprint.pformat(v, indent=2) + logger.info(" %s:" % k) + logger.info(textwrap.indent(text, ' ')) + else: + logger.info(" %s: %s" % (k, v)) + errors = list(e.getAllErrors()) + if errors: + logger.error(" Errors:") + for err in errors: + logger.error(" - %s" % err) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/info.py --- a/piecrust/commands/builtin/info.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/info.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,8 +1,6 @@ import os.path import logging -import fnmatch from piecrust.commands.base import ChefCommand -from piecrust.configuration import ConfigurationDumper logger = logging.getLogger(__name__) @@ -29,11 +27,14 @@ def setupParser(self, parser, app): parser.add_argument( - 'path', - help="The path to a config section or value", - nargs='?') + 'path', + help="The path to a config section or value", + nargs='?') def run(self, ctx): + import yaml + from piecrust.configuration import ConfigurationDumper + if ctx.args.path: show = ctx.app.config.get(ctx.args.path) else: @@ -41,7 +42,6 @@ if show is not None: if isinstance(show, (dict, list)): - import yaml out = yaml.dump(show, default_flow_style=False, Dumper=ConfigurationDumper) logger.info(out) @@ -65,7 +65,11 @@ for src in ctx.app.sources: logger.info("%s:" % src.name) logger.info(" type: %s" % src.config.get('type')) - logger.info(" class: %s" % type(src)) + logger.debug(" class: %s" % type(src)) + desc = src.describe() + if isinstance(desc, dict): + for k, v in desc.items(): + logger.info(" %s: %s" % (k, v)) class ShowRoutesCommand(ChefCommand): @@ -81,7 +85,6 @@ for route in ctx.app.routes: logger.info("%s:" % route.uri_pattern) logger.info(" source: %s" % (route.source_name or '')) - logger.info(" generator: %s" % (route.generator_name or '')) logger.info(" regex: %s" % route.uri_re.pattern) logger.info(" function: %s(%s)" % ( route.func_name, @@ -118,31 +121,34 @@ def setupParser(self, parser, app): parser.add_argument( - 'pattern', - 
help="The pattern to match with page filenames", - nargs='?') + 'pattern', + help="The pattern to match with page filenames", + nargs='?') parser.add_argument( - '-n', '--name', - help="Limit the search to sources matching this name") + '-n', '--name', + help="Limit the search to sources matching this name") parser.add_argument( - '--full-path', - help="Return full paths instead of root-relative paths", - action='store_true') + '--full-path', + help="Return full paths instead of root-relative paths", + action='store_true') parser.add_argument( - '--metadata', - help="Return metadata about the page instead of just the path", - action='store_true') + '--metadata', + help="Return metadata about the page instead of just the path", + action='store_true') parser.add_argument( - '--include-theme', - help="Include theme pages to the search", - action='store_true') + '--include-theme', + help="Include theme pages to the search", + action='store_true') parser.add_argument( - '--exact', - help=("Match the exact given pattern, instead of any page " - "containing the pattern"), - action='store_true') + '--exact', + help=("Match the exact given pattern, instead of any page " + "containing the pattern"), + action='store_true') def run(self, ctx): + import fnmatch + from piecrust.sources.fs import FSContentSourceBase + pattern = ctx.args.pattern sources = list(ctx.app.sources) if not ctx.args.exact and pattern is not None: @@ -154,17 +160,28 @@ if ctx.args.name and not fnmatch.fnmatch(src.name, ctx.args.name): continue - page_facs = src.getPageFactories() - for pf in page_facs: - name = os.path.relpath(pf.path, ctx.app.root_dir) - if pattern is None or fnmatch.fnmatch(name, pattern): - if ctx.args.full_path: - name = pf.path - if ctx.args.metadata: - logger.info("path:%s" % pf.path) - for key, val in pf.metadata.items(): - logger.info("%s:%s" % (key, val)) - logger.info("---") + is_fs_src = isinstance(src, FSContentSourceBase) + items = src.getAllContents() + for item in items: + 
if ctx.args.metadata: + logger.info("spec:%s" % item.spec) + for key, val in item.metadata.items(): + logger.info("%s:%s" % (key, val)) + logger.info("---") + else: + if is_fs_src: + name = os.path.relpath(item.spec, ctx.app.root_dir) + if pattern is None or fnmatch.fnmatch(name, pattern): + if ctx.args.metadata: + logger.info("path:%s" % item.spec) + for key, val in item.metadata.items(): + logger.info("%s:%s" % (key, val)) + logger.info("---") + else: + if ctx.args.full_path: + name = item.spec + logger.info(name) else: - logger.info(name) + if pattern is None or fnmatch.fnmatch(name, pattern): + logger.info(item.spec) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/plugins.py --- a/piecrust/commands/builtin/plugins.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/plugins.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,5 @@ import logging from piecrust.commands.base import ChefCommand -from piecrust.pathutil import SiteNotFoundError logger = logging.getLogger(__name__) @@ -13,24 +12,21 @@ self.description = "Manage the plugins for the current website." def setupParser(self, parser, app): - # Don't setup anything if this is a null app - # (for when `chef` is run from outside a website) - if app.root_dir is None: - return - subparsers = parser.add_subparsers() p = subparsers.add_parser( - 'list', - help="Lists the plugins installed in the current website.") + 'list', + help="Lists the plugins installed in the current website.") p.add_argument( - '-a', '--all', - action='store_true', - help=("Also list all the available plugins for the " - "current environment. The installed one will have an " - "asterix (*).")) + '-a', '--all', + action='store_true', + help=("Also list all the available plugins for the " + "current environment. 
The installed one will have an " + "asterix (*).")) p.set_defaults(sub_func=self._listPlugins) def checkedRun(self, ctx): + from piecrust.pathutil import SiteNotFoundError + if ctx.app.root_dir is None: raise SiteNotFoundError(theme=ctx.app.theme_site) @@ -40,10 +36,11 @@ ctx.args.sub_func(ctx) def _listPlugins(self, ctx): + import pip + names = {} installed_suffix = '' if ctx.args.all: - import pip prefix = 'PieCrust-' installed_packages = pip.get_installed_distributions() for plugin in installed_packages: diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/publishing.py --- a/piecrust/commands/builtin/publishing.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/publishing.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,8 +1,5 @@ import logging -import urllib.parse from piecrust.commands.base import ChefCommand -from piecrust.pathutil import SiteNotFoundError -from piecrust.publishing.publisher import Publisher, find_publisher_name logger = logging.getLogger(__name__) @@ -18,13 +15,21 @@ def setupParser(self, parser, app): parser.add_argument( - '--log-publisher', - metavar='LOG_FILE', - help="Log the publisher's output to a given file.") + '--log-publisher', + metavar='LOG_FILE', + help="Log the publisher's output to a given file.") + parser.add_argument( + '--log-debug-info', + action='store_true', + help="Add some debug info as a preamble to the log file.") parser.add_argument( - '--preview', - action='store_true', - help="Only preview what the publisher would do.") + '--append-log', + action='store_true', + help="Append to the log file if it exists.") + parser.add_argument( + '--preview', + action='store_true', + help="Only preview what the publisher would do.") # Don't setup anything for a null app. if app.root_dir is None: @@ -33,8 +38,8 @@ subparsers = parser.add_subparsers() for pub in app.publishers: p = subparsers.add_parser( - pub.target, - help="Publish using target '%s'." 
% pub.target) + pub.target, + help="Publish using target '%s'." % pub.target) pub.setupPublishParser(p, app) p.set_defaults(sub_func=self._doPublish) p.set_defaults(target=pub.target) @@ -47,6 +52,8 @@ "https://bolt80.com/piecrust/en/latest/docs/publishing/") def checkedRun(self, ctx): + from piecrust.pathutil import SiteNotFoundError + if ctx.app.root_dir is None: raise SiteNotFoundError(theme=ctx.app.theme_site) @@ -56,12 +63,14 @@ ctx.args.sub_func(ctx) def _doPublish(self, ctx): - pub = Publisher(ctx.app) + from piecrust.publishing.base import PublishingManager + + pub = PublishingManager(ctx.appfactory, ctx.app) pub.run( - ctx.args.target, - preview=ctx.args.preview, - extra_args=ctx.args, - log_file=ctx.args.log_publisher, - applied_config_variant=ctx.config_variant, - applied_config_values=ctx.config_values) + ctx.args.target, + preview=ctx.args.preview, + extra_args=ctx.args, + log_file=ctx.args.log_publisher, + log_debug_info=ctx.args.log_debug_info, + append_log_file=ctx.args.append_log) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/scaffolding.py --- a/piecrust/commands/builtin/scaffolding.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/scaffolding.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,27 +1,12 @@ import os import os.path -import re -import io -import time -import glob import logging -import textwrap -from piecrust import RESOURCES_DIR -from piecrust.chefutil import print_help_item from piecrust.commands.base import ExtendableChefCommand, ChefCommandExtension -from piecrust.sources.base import MODE_CREATING -from piecrust.sources.interfaces import IPreparingSource -from piecrust.uriutil import multi_replace logger = logging.getLogger(__name__) -def make_title(slug): - slug = re.sub(r'[\-_]', ' ', slug) - return slug.title() - - class PrepareCommand(ExtendableChefCommand): """ Chef command for creating pages with some default content. 
""" @@ -36,6 +21,8 @@ if app.root_dir is None: return + from piecrust.sources.interfaces import IPreparingSource + subparsers = parser.add_subparsers() for src in app.sources: if not isinstance(src, IPreparingSource): @@ -47,18 +34,22 @@ "source." % src.name) continue p = subparsers.add_parser( - src.item_name, - help=("Creates an empty page in the '%s' source." % - src.name)) + src.config['item_name'], + help=("Creates an empty page in the '%s' source." % + src.name)) src.setupPrepareParser(p, app) p.add_argument('-t', '--template', default='default', help="The template to use, which will change the " - "generated text and header. Run `chef help " - "scaffolding` for more information.") + "generated text and header. Run `chef help " + "scaffolding` for more information.") + p.add_argument('-f', '--force', action='store_true', + help="Overwrite any existing content.") p.set_defaults(source=src) p.set_defaults(sub_func=self._doRun) def checkedRun(self, ctx): + from piecrust.pathutil import SiteNotFoundError + if ctx.app.root_dir is None: raise SiteNotFoundError(theme=ctx.app.theme_site) @@ -68,60 +59,59 @@ ctx.args.sub_func(ctx) def _doRun(self, ctx): + import time + from piecrust.uriutil import multi_replace + from piecrust.sources.fs import FSContentSourceBase + if not hasattr(ctx.args, 'source'): raise Exception("No source specified. " "Please run `chef prepare -h` for usage.") app = ctx.app - source = ctx.args.source - metadata = source.buildMetadata(ctx.args) - factory = source.findPageFactory(metadata, MODE_CREATING) - path = factory.path - name, ext = os.path.splitext(path) - if ext == '.*': - path = '%s.%s' % ( - name, - app.config.get('site/default_auto_format')) - if os.path.exists(path): - raise Exception("'%s' already exists." 
% path) - tpl_name = ctx.args.template extensions = self.getExtensions(app) ext = next( - filter( - lambda e: tpl_name in e.getTemplateNames(ctx.app), - extensions), - None) + filter( + lambda e: tpl_name in e.getTemplateNames(app), + extensions), + None) if ext is None: raise Exception("No such page template: %s" % tpl_name) - - tpl_text = ext.getTemplate(ctx.app, tpl_name) + tpl_text = ext.getTemplate(app, tpl_name) if tpl_text is None: raise Exception("Error loading template: %s" % tpl_name) - title = (metadata.get('slug') or metadata.get('path') or - 'Untitled page') - title = make_title(title) - tokens = { - '%title%': title, - '%time.today%': time.strftime('%Y/%m/%d'), - '%time.now%': time.strftime('%H:%M:%S')} - tpl_text = multi_replace(tpl_text, tokens) + + source = ctx.args.source + content_item = source.createContent(vars(ctx.args)) + if content_item is None: + raise Exception("Can't create item.") - logger.info("Creating page: %s" % os.path.relpath(path, app.root_dir)) - if not os.path.exists(os.path.dirname(path)): - os.makedirs(os.path.dirname(path), 0o755) + config_tokens = { + '%title%': "Untitled Content", + '%time.today%': time.strftime('%Y/%m/%d'), + '%time.now%': time.strftime('%H:%M:%S') + } + config = content_item.metadata.get('config') + if config: + for k, v in config.items(): + config_tokens['%%%s%%' % k] = v + tpl_text = multi_replace(tpl_text, config_tokens) - with open(path, 'w') as f: + logger.info("Creating content: %s" % content_item.spec) + mode = 'w' if ctx.args.force else 'x' + with source.openItem(content_item, mode) as f: f.write(tpl_text) + # If this was a file-system content item, see if we need to auto-open + # an editor on it. 
editor = ctx.app.config.get('prepare/editor') editor_type = ctx.app.config.get('prepare/editor_type', 'exe') - if editor: + if editor and isinstance(source, FSContentSourceBase): import shlex shell = False - args = '%s "%s"' % (editor, path) + args = '%s "%s"' % (editor, content_item.spec) if '%path%' in editor: - args = editor.replace('%path%', path) + args = editor.replace('%path%', content_item.spec) if editor_type.lower() == 'shell': shell = True @@ -146,12 +136,14 @@ def getTemplateDescription(self, app, name): descs = { - 'default': "The default template, for a simple page.", - 'rss': "A fully functional RSS feed.", - 'atom': "A fully functional Atom feed."} + 'default': "The default template, for a simple page.", + 'rss': "A fully functional RSS feed.", + 'atom': "A fully functional Atom feed."} return descs[name] def getTemplate(self, app, name): + from piecrust import RESOURCES_DIR + assert name in ['default', 'rss', 'atom'] src_path = os.path.join(RESOURCES_DIR, 'prepare', '%s.html' % name) with open(src_path, 'r', encoding='utf8') as fp: @@ -182,6 +174,8 @@ return "User-defined template." 
def getTemplate(self, app, name): + import glob + templates_dir = self._getTemplatesDir(app) pattern = os.path.join(templates_dir, '%s.*' % name) matches = glob.glob(pattern) @@ -189,7 +183,7 @@ raise Exception("No such page scaffolding template: %s" % name) if len(matches) > 1: raise Exception( - "More than one scaffolding template has name: %s" % name) + "More than one scaffolding template has name: %s" % name) with open(matches[0], 'r', encoding='utf8') as fp: return fp.read() @@ -204,6 +198,10 @@ "Available templates for the 'prepare' command.")] def getHelpTopic(self, topic, app): + import io + import textwrap + from piecrust.chefutil import print_help_item + with io.StringIO() as tplh: extensions = app.plugin_loader.getCommandExtensions() for e in extensions: @@ -214,16 +212,16 @@ help_list = tplh.getvalue() help_txt = ( - textwrap.fill( - "Running the 'prepare' command will let " - "PieCrust setup a page for you in the correct place, with " - "some hopefully useful default text.") + - "\n\n" + - textwrap.fill("The following templates are available:") + - "\n\n" + - help_list + - "\n" + - "You can add user-defined templates by creating pages in a " - "`scaffold/pages` sub-directory in your website.") + textwrap.fill( + "Running the 'prepare' command will let " + "PieCrust setup a page for you in the correct place, with " + "some hopefully useful default text.") + + "\n\n" + + textwrap.fill("The following templates are available:") + + "\n\n" + + help_list + + "\n" + + "You can add user-defined templates by creating pages in a " + "`scaffold/pages` sub-directory in your website.") return help_txt diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/serving.py --- a/piecrust/commands/builtin/serving.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/serving.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,5 @@ import logging from piecrust.commands.base import ChefCommand -from piecrust.serving.wrappers import run_werkzeug_server, 
run_gunicorn_server logger = logging.getLogger(__name__) @@ -15,57 +14,41 @@ def setupParser(self, parser, app): parser.add_argument( - '-p', '--port', - help="The port for the web server", - default=8080) + '-p', '--port', + help="The port for the web server", + default=8080) parser.add_argument( - '-a', '--address', - help="The host for the web server", - default='localhost') + '-a', '--address', + help="The host for the web server", + default='localhost') + parser.add_argument( + '--use-reloader', + help="Restart the server when PieCrust code changes", + action='store_true') parser.add_argument( - '--use-reloader', - help="Restart the server when PieCrust code changes", - action='store_true') + '--use-debugger', + help="Show the debugger when an error occurs", + action='store_true') parser.add_argument( - '--use-debugger', - help="Show the debugger when an error occurs", - action='store_true') + '--admin', + help="Also serve the administration panel.", + action='store_true') parser.add_argument( - '--wsgi', - help="The WSGI server implementation to use", - choices=['werkzeug', 'gunicorn'], - default='werkzeug') + '--wsgi', + help="The WSGI server implementation to use", + choices=['werkzeug', 'gunicorn'], + default='werkzeug') def run(self, ctx): - root_dir = ctx.app.root_dir + appfactory = ctx.appfactory host = ctx.args.address port = int(ctx.args.port) - debug = ctx.args.debug or ctx.args.use_debugger - - from piecrust.app import PieCrustFactory - appfactory = PieCrustFactory( - ctx.app.root_dir, - cache=ctx.app.cache.enabled, - cache_key=ctx.app.cache_key, - config_variant=ctx.config_variant, - config_values=ctx.config_values, - debug=ctx.app.debug, - theme_site=ctx.app.theme_site) + use_debugger = ctx.args.debug or ctx.args.use_debugger - if ctx.args.wsgi == 'werkzeug': - run_werkzeug_server( - appfactory, host, port, - use_debugger=debug, - use_reloader=ctx.args.use_reloader) - - elif ctx.args.wsgi == 'gunicorn': - options = { - 'bind': '%s:%s' % (host, 
port), - 'accesslog': '-', # print access log to stderr - } - if debug: - options['loglevel'] = 'debug' - if ctx.args.use_reloader: - options['reload'] = True - run_gunicorn_server(appfactory, gunicorn_options=options) - + from piecrust.serving.wrappers import run_piecrust_server + run_piecrust_server( + ctx.args.wsgi, appfactory, host, port, + is_cmdline_mode=True, + serve_admin=ctx.args.admin, + use_reloader=ctx.args.use_reloader, + use_debugger=use_debugger) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/themes.py --- a/piecrust/commands/builtin/themes.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/themes.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,12 +1,8 @@ import os import os.path -import shutil import logging -import yaml -from piecrust import ( - RESOURCES_DIR, THEME_DIR, THEME_CONFIG_PATH, THEME_INFO_PATH) +from piecrust import THEME_DIR, THEME_CONFIG_PATH, THEME_INFO_PATH from piecrust.commands.base import ChefCommand -from piecrust.pathutil import SiteNotFoundError logger = logging.getLogger(__name__) @@ -21,34 +17,36 @@ def setupParser(self, parser, app): subparsers = parser.add_subparsers() p = subparsers.add_parser( - 'info', - help="Provides information about the current theme.") + 'info', + help="Provides information about the current theme.") p.set_defaults(sub_func=self._info) p = subparsers.add_parser( - 'override', - help="Copies the current theme to the website for " - "customization.") + 'override', + help="Copies the current theme to the website for " + "customization.") p.set_defaults(sub_func=self._overrideTheme) p = subparsers.add_parser( - 'link', - help="Makes a given theme the active one for the current " - "website by creating a symbolic link to it from the " - "'theme' directory.") + 'link', + help="Makes a given theme the active one for the current " + "website by creating a symbolic link to it from the " + "'theme' directory.") p.add_argument( - 'theme_dir', - help="The directory of the theme to 
link.") + 'theme_dir', + help="The directory of the theme to link.") p.set_defaults(sub_func=self._linkTheme) p = subparsers.add_parser( - 'unlink', - help="Removes the currently active theme for the website. " - "This removes the symbolic link to the theme, if any, or " - "deletes the theme folder if it was copied locally.") + 'unlink', + help="Removes the currently active theme for the website. " + "This removes the symbolic link to the theme, if any, or " + "deletes the theme folder if it was copied locally.") p.set_defaults(sub_func=self._unlinkTheme) def checkedRun(self, ctx): + from piecrust.pathutil import SiteNotFoundError + if ctx.app.root_dir is None: raise SiteNotFoundError(theme=ctx.app.theme_site) @@ -58,6 +56,8 @@ ctx.args.sub_func(ctx) def _info(self, ctx): + import yaml + theme_dir = ctx.app.theme_dir if not os.path.exists(theme_dir): logger.info("Using default theme, from: %s" % ctx.app.theme_dir) @@ -84,6 +84,8 @@ logger.info(" - %s: %s" % (str(k), str(v))) def _overrideTheme(self, ctx): + import shutil + theme_dir = ctx.app.theme_dir if not theme_dir: logger.error("There is no theme currently applied.") @@ -101,24 +103,24 @@ dst_path = os.path.join(app_dir, rel_dirpath, name) copies.append((src_path, dst_path)) - conflicts = [] + conflicts = set() for c in copies: if os.path.exists(c[1]): - conflicts.append(c[1]) + conflicts.add(c[1]) if conflicts: - logger.warning("Some website files will be overwritten:") + logger.warning("Some website files override theme files:") for c in conflicts: logger.warning(os.path.relpath(c, app_dir)) - logger.warning("Are you sure? 
[Y/n]") - ans = input() - if len(ans) > 0 and ans.lower() not in ['y', 'yes']: - return 1 + logger.warning("") + logger.warning("The local website files will be preserved, and " + "the conflicting theme files won't be copied " + "locally.") for c in copies: - logger.info(os.path.relpath(c[1], app_dir)) - if not os.path.exists(os.path.dirname(c[1])): - os.makedirs(os.path.dirname(c[1])) - shutil.copy2(c[0], c[1]) + if not c[1] in conflicts: + logger.info(os.path.relpath(c[1], app_dir)) + os.makedirs(os.path.dirname(c[1]), exist_ok=True) + shutil.copy2(c[0], c[1]) def _linkTheme(self, ctx): if not os.path.isdir(ctx.args.theme_dir): @@ -126,7 +128,7 @@ return 1 msg = ("A theme already exists, and will be deleted. " - "Are you sure? [Y/n]") + "Are you sure? [Y/n]") self._doUnlinkTheme(ctx.app.root_dir, msg) theme_dir = os.path.join(ctx.app.root_dir, THEME_DIR) @@ -139,10 +141,12 @@ def _unlinkTheme(self, ctx): msg = ("The active theme is local. Are you sure you want " - "to delete the theme directory? [Y/n]") + "to delete the theme directory? 
[Y/n]") self._doUnlinkTheme(ctx.app.root_dir, msg) def _doUnlinkTheme(self, root_dir, delete_message): + import shutil + theme_dir = os.path.join(root_dir, THEME_DIR) if os.path.islink(theme_dir): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/commands/builtin/util.py --- a/piecrust/commands/builtin/util.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/commands/builtin/util.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,9 +1,6 @@ import os import os.path -import shutil -import codecs import logging -import yaml from piecrust import CACHE_DIR, RESOURCES_DIR from piecrust.app import CONFIG_PATH, THEME_CONFIG_PATH from piecrust.commands.base import ChefCommand @@ -21,8 +18,8 @@ def setupParser(self, parser, app): parser.add_argument( - 'destination', - help="The destination directory in which to create the website.") + 'destination', + help="The destination directory in which to create the website.") def run(self, ctx): destination = ctx.args.destination @@ -41,7 +38,8 @@ tpl_path = os.path.join(RESOURCES_DIR, 'webinit', CONFIG_PATH) if ctx.args.theme: - tpl_path = os.path.join(RESOURCES_DIR, 'webinit', THEME_CONFIG_PATH) + tpl_path = os.path.join(RESOURCES_DIR, 'webinit', + THEME_CONFIG_PATH) with open(tpl_path, 'r', encoding='utf-8') as fp: config_text = fp.read() @@ -59,6 +57,8 @@ pass def run(self, ctx): + import shutil + cache_dir = os.path.join(ctx.app.root_dir, CACHE_DIR) if cache_dir and os.path.isdir(cache_dir): logger.info("Purging cache: %s" % cache_dir) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/configuration.py --- a/piecrust/configuration.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/configuration.py Tue Nov 21 22:07:12 2017 -0800 @@ -15,6 +15,13 @@ default_allowed_types = (dict, list, tuple, float, int, bool, str) +MERGE_NEW_VALUES = 0 +MERGE_OVERWRITE_VALUES = 1 +MERGE_PREPEND_LISTS = 2 +MERGE_APPEND_LISTS = 4 +MERGE_ALL = MERGE_OVERWRITE_VALUES | MERGE_PREPEND_LISTS + + class ConfigurationError(Exception): pass @@ -64,7 +71,7 @@ self._ensureLoaded() 
return self._values - def merge(self, other): + def merge(self, other, mode=MERGE_ALL): self._ensureLoaded() if isinstance(other, dict): @@ -73,9 +80,10 @@ other_values = other._values else: raise Exception( - "Unsupported value type to merge: %s" % type(other)) + "Unsupported value type to merge: %s" % type(other)) merge_dicts(self._values, other_values, + mode=mode, validator=self._validateValue) def validateTypes(self, allowed_types=default_allowed_types): @@ -96,7 +104,7 @@ return if not isinstance(v, allowed_types): raise ConfigurationError( - "Value '%s' is of forbidden type: %s" % (v, type(v))) + "Value '%s' is of forbidden type: %s" % (v, type(v))) if isinstance(v, dict): self._validateDictTypesRecursive(v, allowed_types) elif isinstance(v, list): @@ -162,13 +170,6 @@ cur = cur[b] -MERGE_NEW_VALUES = 0 -MERGE_OVERWRITE_VALUES = 1 -MERGE_PREPEND_LISTS = 2 -MERGE_APPEND_LISTS = 4 -MERGE_ALL = MERGE_OVERWRITE_VALUES | MERGE_PREPEND_LISTS - - def merge_dicts(source, merging, *args, validator=None, mode=MERGE_ALL): _recurse_merge_dicts(source, merging, None, validator, mode) @@ -223,7 +224,7 @@ header_regex = re.compile( - r'(---\s*\n)(?P
(.*\n)*?)^(---\s*\n)', re.MULTILINE) + r'(---\s*\n)(?P
(.*\n)*?)^(---\s*\n)', re.MULTILINE) def parse_config_header(text): @@ -239,17 +240,18 @@ class ConfigurationLoader(SafeLoader): - """ A YAML loader that loads mappings into ordered dictionaries. + """ A YAML loader that loads mappings into ordered dictionaries, + and supports sexagesimal notations for timestamps. """ def __init__(self, *args, **kwargs): super(ConfigurationLoader, self).__init__(*args, **kwargs) self.add_constructor('tag:yaml.org,2002:map', - type(self).construct_yaml_map) + type(self).construct_yaml_map) self.add_constructor('tag:yaml.org,2002:omap', - type(self).construct_yaml_map) + type(self).construct_yaml_map) self.add_constructor('tag:yaml.org,2002:sexagesimal', - type(self).construct_yaml_time) + type(self).construct_yaml_time) def construct_yaml_map(self, node): data = collections.OrderedDict() @@ -259,21 +261,23 @@ def construct_mapping(self, node, deep=False): if not isinstance(node, yaml.MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) + raise ConstructorError( + None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) mapping = collections.OrderedDict() for key_node, value_node in node.value: key = self.construct_object(key_node, deep=deep) if not isinstance(key, collections.Hashable): - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unhashable key", key_node.start_mark) + raise ConstructorError( + "while constructing a mapping", node.start_mark, + "found unhashable key", key_node.start_mark) value = self.construct_object(value_node, deep=deep) mapping[key] = value return mapping time_regexp = re.compile( - r'''^(?P[0-9][0-9]?) + r'''^(?P[0-9][0-9]?) 
:(?P[0-9][0-9]) (:(?P[0-9][0-9]) (\.(?P[0-9]+))?)?$''', re.X) @@ -294,10 +298,10 @@ ConfigurationLoader.add_implicit_resolver( - 'tag:yaml.org,2002:sexagesimal', - re.compile(r'''^[0-9][0-9]?:[0-9][0-9] + 'tag:yaml.org,2002:sexagesimal', + re.compile(r'''^[0-9][0-9]?:[0-9][0-9] (:[0-9][0-9](\.[0-9]+)?)?$''', re.X), - list('0123456789')) + list('0123456789')) # We need to add our `sexagesimal` resolver before the `int` one, which @@ -319,5 +323,5 @@ ConfigurationDumper.add_representer(collections.OrderedDict, - ConfigurationDumper.represent_ordered_dict) + ConfigurationDumper.represent_ordered_dict) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/assetor.py --- a/piecrust/data/assetor.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/assetor.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,8 +1,7 @@ import os import os.path -import shutil import logging -from piecrust import ASSET_DIR_SUFFIX +from piecrust.sources.base import REL_ASSETS from piecrust.uriutil import multi_replace @@ -13,105 +12,108 @@ pass -def build_base_url(app, uri, rel_assets_path): - base_url_format = app.env.base_asset_url_format - rel_assets_path = rel_assets_path.replace('\\', '/') - - # Remove any extension since we'll be copying assets into the 1st - # sub-page's folder. 
- pretty = app.config.get('site/pretty_urls') - if not pretty: - uri, _ = os.path.splitext(uri) - - base_url = multi_replace( - base_url_format, - { - '%path%': rel_assets_path, - '%uri%': uri}) - - return base_url.rstrip('/') + '/' +class _AssetInfo: + def __init__(self, content_item, uri): + self.content_item = content_item + self.uri = uri -class AssetorBase(object): - def __init__(self, page, uri): - self._page = page - self._uri = uri - self._cache = None - - def __getattr__(self, name): - try: - self._cacheAssets() - return self._cache[name][0] - except KeyError: - raise AttributeError() - - def __getitem__(self, key): - self._cacheAssets() - return self._cache[key][0] - - def __iter__(self): - self._cacheAssets() - return map(lambda i: i[0], self._cache.values()) - - def allNames(self): - self._cacheAssets() - return list(self._cache.keys()) - - def _debugRenderAssetNames(self): - self._cacheAssets() - return list(self._cache.keys()) - - def _cacheAssets(self): - if self._cache is not None: - return - - self._cache = dict(self.findAssets()) - - def findAssets(self): - raise NotImplementedError() - - def copyAssets(self, dest_dir): - raise NotImplementedError() - -class Assetor(AssetorBase): +class Assetor: debug_render_doc = """Helps render URLs to files in the current page's asset folder.""" debug_render = [] debug_render_dynamic = ['_debugRenderAssetNames'] - def findAssets(self): - assets = {} - name, ext = os.path.splitext(self._page.path) - assets_dir = name + ASSET_DIR_SUFFIX - if not os.path.isdir(assets_dir): - return assets + def __init__(self, page): + self._page = page + self._cache_map = None + self._cache_list = None + + def __getattr__(self, name): + try: + self._cacheAssets() + return self._cache_map[name].uri + except KeyError: + raise AttributeError() + + def __getitem__(self, name): + self._cacheAssets() + return self._cache_map[name].uri + + def __contains__(self, name): + self._cacheAssets() + return name in self._cache_map + + def 
__iter__(self): + self._cacheAssets() + return iter(self._cache_list) + + def __len__(self): + self._cacheAssets() + return len(self._cache_map) + + def _getAssetNames(self): + self._cacheAssets() + return self._cache_map.keys() + + def _getAssetItems(self): + self._cacheAssets() + return map(lambda i: i.content_item, self._cache_map.values()) - rel_assets_dir = os.path.relpath(assets_dir, self._page.app.root_dir) - base_url = build_base_url(self._page.app, self._uri, rel_assets_dir) - for fn in os.listdir(assets_dir): - full_fn = os.path.join(assets_dir, fn) - if not os.path.isfile(full_fn): - raise Exception("Skipping: %s" % full_fn) - continue + def _debugRenderAssetNames(self): + self._cacheAssets() + return list(self._cache_map.keys()) + + def _cacheAssets(self): + if self._cache_map is not None: + return + + source = self._page.source + content_item = self._page.content_item + assets = source.getRelatedContents(content_item, REL_ASSETS) + + self._cache_map = {} + self._cache_list = [] + + if assets is None: + return - name, ext = os.path.splitext(fn) - if name in assets: + app = source.app + root_dir = app.root_dir + asset_url_format = app.config.get('site/asset_url_format') + if not asset_url_format: + raise Exception("No asset URL format was specified.") + + page_uri = self._page.getUri() + pretty_urls = app.config.get('site/pretty_urls') + if not pretty_urls: + page_uri, _ = os.path.splitext(page_uri) + + uri_build_tokens = { + '%path%': None, + '%filename%': None, + '%page_uri%': page_uri + } + + for a in assets: + name = a.metadata['name'] + if name in self._cache_map: raise UnsupportedAssetsError( - "Multiple asset files are named '%s'." % name) - assets[name] = (base_url + fn, full_fn) - - cpi = self._page.app.env.exec_info_stack.current_page_info - if cpi is not None: - cpi.render_ctx.current_pass_info.used_assets = True - - return assets + "An asset with name '%s' already exists for item '%s'. " + "Do you have multiple assets with colliding names?" 
% + (name, content_item.spec)) - def copyAssets(self, dest_dir): - page_pathname, _ = os.path.splitext(self._page.path) - in_assets_dir = page_pathname + ASSET_DIR_SUFFIX - for fn in os.listdir(in_assets_dir): - full_fn = os.path.join(in_assets_dir, fn) - if os.path.isfile(full_fn): - dest_ap = os.path.join(dest_dir, fn) - logger.debug(" %s -> %s" % (full_fn, dest_ap)) - shutil.copy(full_fn, dest_ap) + # TODO: this assumes a file-system source! + uri_build_tokens['%path%'] = \ + os.path.relpath(a.spec, root_dir).replace('\\', '/') + uri_build_tokens['%filename%'] = a.metadata['filename'] + uri = multi_replace(asset_url_format, uri_build_tokens) + + self._cache_map[name] = _AssetInfo(a, uri) + self._cache_list.append(uri) + + stack = app.env.render_ctx_stack + cur_ctx = stack.current_ctx + if cur_ctx is not None: + cur_ctx.render_info['used_assets'] = True + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/base.py --- a/piecrust/data/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -5,9 +5,10 @@ """ Provides a dictionary-like object that's really the aggregation of multiple dictionary-like objects. 
""" - def __init__(self, dicts, path=''): + def __init__(self, dicts, path='', *, stats=None): self._dicts = dicts self._path = path + self._stats = stats def __getattr__(self, name): try: @@ -40,10 +41,10 @@ for val in values: if not isinstance(val, (dict, collections.abc.Mapping)): raise Exception( - "Template data for '%s' contains an incompatible mix " - "of data: %s" % ( - self._subp(name), - ', '.join([str(type(v)) for v in values]))) + "Template data for '%s' contains an incompatible mix " + "of data: %s" % ( + self._subp(name), + ', '.join([str(type(v)) for v in values]))) return MergedMapping(values, self._subp(name)) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/builder.py --- a/piecrust/data/builder.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/builder.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,53 +1,45 @@ import logging -from werkzeug.utils import cached_property +from piecrust.data.assetor import Assetor from piecrust.data.base import MergedMapping -from piecrust.data.linker import PageLinkerData +from piecrust.data.linker import Linker from piecrust.data.pagedata import PageData from piecrust.data.paginator import Paginator from piecrust.data.piecrustdata import PieCrustData from piecrust.data.providersdata import DataProvidersData -from piecrust.routing import CompositeRouteFunction +from piecrust.routing import RouteFunction logger = logging.getLogger(__name__) -class DataBuildingContext(object): - def __init__(self, qualified_page, page_num=1): - self.page = qualified_page - self.page_num = page_num +class DataBuildingContext: + def __init__(self, page, sub_num): + self.page = page + self.sub_num = sub_num self.pagination_source = None self.pagination_filter = None - @property - def app(self): - return self.page.app - - @cached_property - def uri(self): - return self.page.getUri(self.page_num) - def build_page_data(ctx): - app = ctx.app page = ctx.page + sub_num = ctx.sub_num + app = page.app + pgn_source = ctx.pagination_source or 
get_default_pagination_source(page) - first_uri = ctx.page.getUri(1) pc_data = PieCrustData() config_data = PageData(page, ctx) - paginator = Paginator(page, pgn_source, - page_num=ctx.page_num, + paginator = Paginator(pgn_source, page, sub_num, pgn_filter=ctx.pagination_filter) - assetor = page.source.buildAssetor(page, first_uri) - linker = PageLinkerData(page.source, page.rel_path) + assetor = Assetor(page) + linker = Linker(page.source, page.content_item) data = { - 'piecrust': pc_data, - 'page': config_data, - 'assets': assetor, - 'pagination': paginator, - 'family': linker - } + 'piecrust': pc_data, + 'page': config_data, + 'assets': assetor, + 'pagination': paginator, + 'family': linker + } for route in app.routes: name = route.func_name @@ -56,21 +48,28 @@ func = data.get(name) if func is None: - func = CompositeRouteFunction() - func.addFunc(route) - data[name] = func - elif isinstance(func, CompositeRouteFunction): - func.addFunc(route) + data[name] = RouteFunction(route) + elif isinstance(func, RouteFunction): + if not func._isCompatibleRoute(route): + raise Exception( + "Route function '%s' can't target both route '%s' and " + "route '%s' as the 2 patterns are incompatible." % + (name, func._route.uri_pattern, route.uri_pattern)) else: raise Exception("Route function '%s' collides with an " "existing function or template data." % name) - #TODO: handle slugified taxonomy terms. + # TODO: handle slugified taxonomy terms. site_data = app.config.getAll() providers_data = DataProvidersData(page) - data = MergedMapping([data, providers_data, site_data]) + + # Put the site data first so that `MergedMapping` doesn't load stuff + # for nothing just to find a value that was in the YAML config all + # along. + data = MergedMapping([site_data, data, providers_data], + stats=app.env.stats) # Do this at the end because we want all the data to be ready to be # displayed in the debugger window. 
@@ -81,7 +80,7 @@ return data -def build_layout_data(page, page_data, contents): +def add_layout_data(page_data, contents): for name, txt in contents.items(): if name in page_data: logger.warning("Content segment '%s' will hide existing data." % diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/filters.py --- a/piecrust/data/filters.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/filters.py Tue Nov 21 22:07:12 2017 -0800 @@ -4,91 +4,6 @@ logger = logging.getLogger(__name__) -def page_value_accessor(page, name): - return page.config.get(name) - - -class PaginationFilter(object): - def __init__(self, value_accessor=None): - self.root_clause = None - self.value_accessor = value_accessor or self._default_value_accessor - - @property - def is_empty(self): - return self.root_clause is None - - def addClause(self, clause): - self._ensureRootClause() - self.root_clause.addClause(clause) - - def addClausesFromConfig(self, config): - self._ensureRootClause() - self._addClausesFromConfigRecursive(config, self.root_clause) - - def pageMatches(self, page): - if self.root_clause is None: - return True - return self.root_clause.pageMatches(self, page) - - def _ensureRootClause(self): - if self.root_clause is None: - self.root_clause = AndBooleanClause() - - def _addClausesFromConfigRecursive(self, config, parent_clause): - for key, val in config.items(): - if key == 'and': - if not isinstance(val, list) or len(val) == 0: - raise Exception("The given boolean 'AND' filter clause " - "doesn't have an array of child clauses.") - subcl = AndBooleanClause() - parent_clause.addClause(subcl) - for c in val: - self._addClausesFromConfigRecursive(c, subcl) - - elif key == 'or': - if not isinstance(val, list) or len(val) == 0: - raise Exception("The given boolean 'OR' filter clause " - "doesn't have an array of child clauses.") - subcl = OrBooleanClause() - parent_clause.addClause(subcl) - for c in val: - self._addClausesFromConfigRecursive(c, subcl) - - elif key == 'not': - if 
isinstance(val, list): - if len(val) != 1: - raise Exception("'NOT' filter clauses must have " - "exactly one child clause.") - val = val[0] - subcl = NotClause() - parent_clause.addClause(subcl) - self._addClausesFromConfigRecursive(val, subcl) - - elif key[:4] == 'has_': - setting_name = key[4:] - if isinstance(val, list): - wrappercl = AndBooleanClause() - for c in val: - wrappercl.addClause(HasFilterClause(setting_name, c)) - parent_clause.addClause(wrappercl) - else: - parent_clause.addClause(HasFilterClause(setting_name, val)) - - elif key[:3] == 'is_': - setting_name = key[3:] - parent_clause.addClause(IsFilterClause(setting_name, val)) - - else: - raise Exception("Unknown filter clause: %s" % key) - - @staticmethod - def _default_value_accessor(item, name): - try: - return getattr(item, name) - except AttributeError: - return None - - class IFilterClause(object): def addClause(self, clause): raise NotImplementedError() @@ -138,6 +53,22 @@ return False +class IsDefinedFilterClause(IFilterClause): + def __init__(self, name): + self.name = name + + def pageMatches(self, fil, page): + return self.name in page.config + + +class IsNotEmptyFilterClause(IFilterClause): + def __init__(self, name): + self.name = name + + def pageMatches(self, fil, page): + return bool(page.config.get(self.name)) + + class SettingFilterClause(IFilterClause): def __init__(self, name, value, coercer=None): self.name = name @@ -151,7 +82,7 @@ class HasFilterClause(SettingFilterClause): def pageMatches(self, fil, page): - actual_value = fil.value_accessor(page, self.name) + actual_value = page.config.get(self.name) if actual_value is None or not isinstance(actual_value, list): return False @@ -163,8 +94,98 @@ class IsFilterClause(SettingFilterClause): def pageMatches(self, fil, page): - actual_value = fil.value_accessor(page, self.name) + actual_value = page.config.get(self.name) if self.coercer: actual_value = self.coercer(actual_value) return actual_value == self.value + +unary_ops = 
{'not': NotClause} +binary_ops = { + 'and': AndBooleanClause, + 'or': OrBooleanClause} +misc_ops = { + 'defined': IsDefinedFilterClause, + 'not_empty': IsNotEmptyFilterClause} + + +class PaginationFilter(object): + def __init__(self): + self.root_clause = None + + @property + def is_empty(self): + return self.root_clause is None + + def addClause(self, clause): + self._ensureRootClause() + self.root_clause.addClause(clause) + + def addClausesFromConfig(self, config): + self._ensureRootClause() + self._addClausesFromConfigRecursive(config, self.root_clause) + + def pageMatches(self, page): + if self.root_clause is None: + return True + return self.root_clause.pageMatches(self, page) + + def _ensureRootClause(self): + if self.root_clause is None: + self.root_clause = AndBooleanClause() + + def _addClausesFromConfigRecursive(self, config, parent_clause): + for key, val in config.items(): + clause_class = unary_ops.get(key) + if clause_class: + if isinstance(val, list): + if len(val) != 1: + raise Exception( + "Unary filter '%s' must have exactly one child " + "clause." % key) + val = val[0] + subcl = clause_class() + parent_clause.addClause(subcl) + self._addClausesFromConfigRecursive(val, subcl) + continue + + clause_class = binary_ops.get(key) + if clause_class: + if not isinstance(val, list) or len(val) == 0: + raise Exception( + "Binary filter clause '%s' doesn't have an array " + "of child clauses." 
% key) + subcl = clause_class() + parent_clause.addClause(subcl) + for c in val: + self._addClausesFromConfigRecursive(c, subcl) + continue + + clause_class = misc_ops.get(key) + if clause_class: + if isinstance(val, list): + wrappercl = AndBooleanClause() + for c in val: + wrappercl.addClause(clause_class(c)) + parent_clause.addClause(wrappercl) + else: + parent_clause.addClause(clause_class(val)) + continue + + if key[:4] == 'has_': + setting_name = key[4:] + if isinstance(val, list): + wrappercl = AndBooleanClause() + for c in val: + wrappercl.addClause(HasFilterClause(setting_name, c)) + parent_clause.addClause(wrappercl) + else: + parent_clause.addClause(HasFilterClause(setting_name, val)) + continue + + if key[:3] == 'is_': + setting_name = key[3:] + parent_clause.addClause(IsFilterClause(setting_name, val)) + continue + + raise Exception("Unknown filter clause: %s" % key) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/iterators.py --- a/piecrust/data/iterators.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,353 +0,0 @@ -import logging -from piecrust.data.filters import PaginationFilter, IsFilterClause, NotClause -from piecrust.environment import AbortedSourceUseError -from piecrust.events import Event -from piecrust.sources.base import PageSource -from piecrust.sources.interfaces import IPaginationSource - - -logger = logging.getLogger(__name__) - - -class SliceIterator(object): - def __init__(self, it, offset=0, limit=-1): - self.it = it - self.offset = offset - self.limit = limit - self.current_page = None - self.has_more = False - self.inner_count = -1 - self.next_page = None - self.prev_page = None - self._cache = None - - def __iter__(self): - if self._cache is None: - inner_list = list(self.it) - self.inner_count = len(inner_list) - - if self.limit > 0: - self.has_more = self.inner_count > (self.offset + self.limit) - self._cache = inner_list[self.offset:self.offset + self.limit] - else: - self.has_more = False 
- self._cache = inner_list[self.offset:] - - if self.current_page: - try: - idx = inner_list.index(self.current_page) - except ValueError: - idx = -1 - if idx >= 0: - if idx < self.inner_count - 1: - self.next_page = inner_list[idx + 1] - if idx > 0: - self.prev_page = inner_list[idx - 1] - - return iter(self._cache) - - -class SettingFilterIterator(object): - def __init__(self, it, fil_conf, setting_accessor=None): - self.it = it - self.fil_conf = fil_conf - self._fil = None - self.setting_accessor = setting_accessor - - def __iter__(self): - if self._fil is None: - self._fil = PaginationFilter(value_accessor=self.setting_accessor) - self._fil.addClausesFromConfig(self.fil_conf) - - for i in self.it: - if self._fil.pageMatches(i): - yield i - - -class NaturalSortIterator(object): - def __init__(self, it, reverse=False): - self.it = it - self.reverse = reverse - - def __iter__(self): - return iter(sorted(self.it, reverse=self.reverse)) - - -class SettingSortIterator(object): - def __init__(self, it, name, reverse=False, value_accessor=None): - self.it = it - self.name = name - self.reverse = reverse - self.value_accessor = value_accessor or self._default_value_accessor - - def __iter__(self): - return iter(sorted(self.it, key=self._key_getter, - reverse=self.reverse)) - - def _key_getter(self, item): - key = self.value_accessor(item, self.name) - if key is None: - return 0 - return key - - @staticmethod - def _default_value_accessor(item, name): - try: - return getattr(item, name) - except AttributeError: - return None - - -class PaginationFilterIterator(object): - def __init__(self, it, fil): - self.it = it - self._fil = fil - - def __iter__(self): - for page in self.it: - if self._fil.pageMatches(page): - yield page - - -class GenericSortIterator(object): - def __init__(self, it, sorter): - self.it = it - self.sorter = sorter - self._sorted_it = None - - def __iter__(self): - if self._sorted_it is None: - self._sorted_it = self.sorter(self.it) - return 
iter(self._sorted_it) - - -class PageIterator(object): - debug_render = [] - debug_render_doc_dynamic = ['_debugRenderDoc'] - debug_render_not_empty = True - - def __init__(self, source, *, - current_page=None, - pagination_filter=None, sorter=None, - offset=0, limit=-1, locked=False): - self._source = source - self._current_page = current_page - self._locked = False - self._pages = source - self._pagesData = None - self._pagination_slicer = None - self._has_sorter = False - self._next_page = None - self._prev_page = None - self._iter_event = Event() - - if isinstance(source, IPaginationSource): - src_it = source.getSourceIterator() - if src_it is not None: - self._pages = src_it - - # If we're currently baking, apply the default baker filter - # to exclude things like draft posts. - if (isinstance(source, PageSource) and - source.app.config.get('baker/is_baking')): - setting_name = source.app.config.get('baker/no_bake_setting', - 'draft') - accessor = self._getSettingAccessor() - draft_filter = PaginationFilter(accessor) - draft_filter.root_clause = NotClause() - draft_filter.root_clause.addClause( - IsFilterClause(setting_name, True)) - self._simpleNonSortedWrap( - PaginationFilterIterator, draft_filter) - - # Apply any filter first, before we start sorting or slicing. 
- if pagination_filter is not None: - self._simpleNonSortedWrap(PaginationFilterIterator, - pagination_filter) - - if sorter is not None: - self._simpleNonSortedWrap(GenericSortIterator, sorter) - self._has_sorter = True - - if offset > 0 or limit > 0: - self.slice(offset, limit) - - self._locked = locked - - @property - def total_count(self): - self._load() - if self._pagination_slicer is not None: - return self._pagination_slicer.inner_count - return len(self._pagesData) - - @property - def next_page(self): - self._load() - return self._next_page - - @property - def prev_page(self): - self._load() - return self._prev_page - - def __len__(self): - self._load() - return len(self._pagesData) - - def __getitem__(self, key): - self._load() - return self._pagesData[key] - - def __iter__(self): - self._load() - self._iter_event.fire() - return iter(self._pagesData) - - def __getattr__(self, name): - if name[:3] == 'is_' or name[:3] == 'in_': - def is_filter(value): - conf = {'is_%s' % name[3:]: value} - accessor = self._getSettingAccessor() - return self._simpleNonSortedWrap(SettingFilterIterator, conf, - accessor) - return is_filter - - if name[:4] == 'has_': - def has_filter(value): - conf = {name: value} - accessor = self._getSettingAccessor() - return self._simpleNonSortedWrap(SettingFilterIterator, conf, - accessor) - return has_filter - - if name[:5] == 'with_': - def has_filter(value): - conf = {'has_%s' % name[5:]: value} - accessor = self._getSettingAccessor() - return self._simpleNonSortedWrap(SettingFilterIterator, conf, - accessor) - return has_filter - - return self.__getattribute__(name) - - def skip(self, count): - return self._simpleWrap(SliceIterator, count) - - def limit(self, count): - return self._simpleWrap(SliceIterator, 0, count) - - def slice(self, skip, limit): - return self._simpleWrap(SliceIterator, skip, limit) - - def filter(self, filter_name): - if self._current_page is None: - raise Exception("Can't use `filter()` because no parent page 
was " - "set for this page iterator.") - filter_conf = self._current_page.config.get(filter_name) - if filter_conf is None: - raise Exception("Couldn't find filter '%s' in the configuration " - "header for page: %s" % - (filter_name, self._current_page.path)) - accessor = self._getSettingAccessor() - return self._simpleNonSortedWrap(SettingFilterIterator, filter_conf, - accessor) - - def sort(self, setting_name=None, reverse=False): - self._ensureUnlocked() - self._unload() - if setting_name is not None: - accessor = self._getSettingAccessor() - self._pages = SettingSortIterator(self._pages, setting_name, - reverse, accessor) - else: - self._pages = NaturalSortIterator(self._pages, reverse) - self._has_sorter = True - return self - - def reset(self): - self._ensureUnlocked() - self._unload - return self - - @property - def _has_more(self): - self._load() - if self._pagination_slicer: - return self._pagination_slicer.has_more - return False - - def _simpleWrap(self, it_class, *args, **kwargs): - self._ensureUnlocked() - self._unload() - self._ensureSorter() - self._pages = it_class(self._pages, *args, **kwargs) - if self._pagination_slicer is None and it_class is SliceIterator: - self._pagination_slicer = self._pages - self._pagination_slicer.current_page = self._current_page - return self - - def _simpleNonSortedWrap(self, it_class, *args, **kwargs): - self._ensureUnlocked() - self._unload() - self._pages = it_class(self._pages, *args, **kwargs) - return self - - def _getSettingAccessor(self): - accessor = None - if isinstance(self._source, IPaginationSource): - accessor = self._source.getSettingAccessor() - return accessor - - def _ensureUnlocked(self): - if self._locked: - raise Exception( - "This page iterator has been locked, probably because " - "you're trying to tamper with pagination data.") - - def _ensureSorter(self): - if self._has_sorter: - return - if isinstance(self._source, IPaginationSource): - sort_it = self._source.getSorterIterator(self._pages) - 
if sort_it is not None: - self._pages = sort_it - self._has_sorter = True - - def _unload(self): - self._pagesData = None - self._next_page = None - self._prev_page = None - - def _load(self): - if self._pagesData is not None: - return - - if (self._current_page is not None and - self._current_page.app.env.abort_source_use and - isinstance(self._source, PageSource)): - logger.debug("Aborting iteration from %s." % - self._current_page.ref_spec) - raise AbortedSourceUseError() - - self._ensureSorter() - - it_chain = self._pages - is_pgn_source = False - if isinstance(self._source, IPaginationSource): - is_pgn_source = True - tail_it = self._source.getTailIterator(self._pages) - if tail_it is not None: - it_chain = tail_it - - self._pagesData = list(it_chain) - - if is_pgn_source and self._current_page and self._pagination_slicer: - pn = [self._pagination_slicer.prev_page, - self._pagination_slicer.next_page] - pn_it = self._source.getTailIterator(iter(pn)) - self._prev_page, self._next_page = (list(pn_it)) - - def _debugRenderDoc(self): - return "Contains %d items" % len(self) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/linker.py --- a/piecrust/data/linker.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/linker.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,365 +1,134 @@ import logging -import collections -from piecrust.data.iterators import PageIterator -from piecrust.data.pagedata import LazyPageConfigLoaderHasNoValue from piecrust.data.paginationdata import PaginationData -from piecrust.sources.interfaces import IPaginationSource, IListableSource +from piecrust.sources.base import ( + REL_PARENT_GROUP, REL_LOGICAL_PARENT_ITEM, REL_LOGICAl_CHILD_GROUP) logger = logging.getLogger(__name__) -class PageLinkerData(object): - """ Entry template data to get access to related pages from a given - root page. +_unloaded = object() + + +class Linker: + """ A template-exposed data class that lets the user navigate the + logical hierarchy of pages in a page source. 
""" debug_render = ['parent', 'ancestors', 'siblings', 'children', 'root', 'forpath'] debug_render_invoke = ['parent', 'ancestors', 'siblings', 'children', 'root'] debug_render_redirect = { - 'ancestors': '_debugRenderAncestors', - 'siblings': '_debugRenderSiblings', - 'children': '_debugRenderChildren', - 'root': '_debugRenderRoot'} + 'ancestors': '_debugRenderAncestors', + 'siblings': '_debugRenderSiblings', + 'children': '_debugRenderChildren', + 'root': '_debugRenderRoot'} - def __init__(self, source, page_path): + def __init__(self, source, content_item): self._source = source - self._root_page_path = page_path - self._linker = None - self._is_loaded = False + self._content_item = content_item + + self._parent_group = _unloaded + self._ancestors = None + self._siblings = None + self._children = None @property def parent(self): - self._load() - if self._linker is not None: - return self._linker.parent + a = self.ancestors + if a: + return a[0] return None @property def ancestors(self): - cur = self.parent - while cur: - yield cur - cur = cur.parent + if self._ancestors is None: + src = self._source + app = src.app + + self._ancestors = [] + cur_group = self._getParentGroup() + while cur_group: + pi = src.getRelatedContents(cur_group, + REL_LOGICAL_PARENT_ITEM) + if pi is not None: + pipage = app.getPage(src, pi) + self._ancestors.append(PaginationData(pipage)) + cur_group = src.getRelatedContents( + pi, REL_PARENT_GROUP) + else: + break + return self._ancestors @property def siblings(self): - self._load() - if self._linker is None: - return [] - return self._linker - - @property - def children(self): - self._load() - if self._linker is None: - return [] - self._linker._load() - if self._linker._self_item is None: - return [] - children = self._linker._self_item._linker_info.child_linker - if children is None: - return [] - return children - - @property - def root(self): - self._load() - if self._linker is None: - return None - return self._linker.root - - def 
forpath(self, rel_path): - self._load() - if self._linker is None: - return None - return self._linker.forpath(rel_path) - - def _load(self): - if self._is_loaded: - return - - self._is_loaded = True - - is_listable = isinstance(self._source, IListableSource) - if not is_listable: - return - - dir_path = self._source.getDirpath(self._root_page_path) - self._linker = Linker(self._source, dir_path, - root_page_path=self._root_page_path) - - def _debugRenderAncestors(self): - return [i.name for i in self.ancestors] - - def _debugRenderSiblings(self): - return [i.name for i in self.siblings] - - def _debugRenderChildren(self): - return [i.name for i in self.children] - - def _debugRenderRoot(self): - r = self.root - if r is not None: - return r.name - return None - - -class LinkedPageData(PaginationData): - """ Class whose instances get returned when iterating on a `Linker` - or `RecursiveLinker`. It's just like what gets usually returned by - `Paginator` and other page iterators, but with a few additional data - like hierarchical data. 
- """ - debug_render = (['is_dir', 'is_self', 'parent', 'children'] + - PaginationData.debug_render) - debug_render_invoke = (['is_dir', 'is_self', 'parent', 'children'] + - PaginationData.debug_render_invoke) - - def __init__(self, page): - super(LinkedPageData, self).__init__(page) - self.name = page._linker_info.name - self.is_self = page._linker_info.is_self - self.is_dir = page._linker_info.is_dir - self.is_page = True - self._child_linker = page._linker_info.child_linker - - self._mapLoader('*', self._linkerChildLoader) - - @property - def parent(self): - if self._child_linker is not None: - return self._child_linker.parent - return None + src = self._source + app = src.app + for i in self._getAllSiblings(): + if not i.is_group: + ipage = app.getPage(src, i) + ipage_data = PaginationData(ipage) + ipage_data._setValue('is_self', + i.spec == self._content_item.spec) + yield ipage_data @property def children(self): - if self._child_linker is not None: - return self._child_linker - return [] - - def _linkerChildLoader(self, data, name): - if self.children and hasattr(self.children, name): - return getattr(self.children, name) - raise LazyPageConfigLoaderHasNoValue - + src = self._source + app = src.app + for i in self._getAllChildren(): + if not i.is_group: + ipage = app.getPage(src, i) + yield PaginationData(ipage) -class LinkedPageDataBuilderIterator(object): - """ Iterator that builds `LinkedPageData` out of pages. - """ - def __init__(self, it): - self.it = it - - def __iter__(self): - for item in self.it: - yield LinkedPageData(item) - + def forpath(self, path): + # TODO: generalize this for sources that aren't file-system based. + item = self._source.findContentFromSpec({'slug': path}) + return Linker(self._source, item) -class LinkerSource(IPaginationSource): - """ Source iterator that returns pages given by `Linker`. 
- """ - def __init__(self, pages, orig_source): - self._pages = list(pages) - self._orig_source = None - if isinstance(orig_source, IPaginationSource): - self._orig_source = orig_source - - def getItemsPerPage(self): - raise NotImplementedError() - - def getSourceIterator(self): - return self._pages - - def getSorterIterator(self, it): - # We don't want to sort the pages -- we expect the original source - # to return hierarchical items in the order it wants already. - return None - - def getTailIterator(self, it): - return LinkedPageDataBuilderIterator(it) - - def getPaginationFilter(self, page): - return None - - def getSettingAccessor(self): - if self._orig_source: - return self._orig_source.getSettingAccessor() + def childrenof(self, path): + # TODO: generalize this for sources that aren't file-system based. + src = self._source + app = src.app + group = src.findContentFromSpec(path) + if group is not None: + if not group.is_group: + raise Exception("'%s' is not a folder/group." % path) + for i in src.getContents(group): + if not i.is_group: + ipage = app.getPage(src, i) + yield PaginationData(ipage) return None - -class _LinkerInfo(object): - def __init__(self): - self.name = None - self.is_dir = False - self.is_self = False - self.child_linker = None - - -class _LinkedPage(object): - def __init__(self, page): - self._page = page - self._linker_info = _LinkerInfo() - - def __getattr__(self, name): - return getattr(self._page, name) - - -class Linker(object): - debug_render_doc = """Provides access to sibling and children pages.""" - - def __init__(self, source, dir_path, *, root_page_path=None): - self._source = source - self._dir_path = dir_path - self._root_page_path = root_page_path - self._items = None - self._parent = None - self._self_item = None - - self.is_dir = True - self.is_page = False - self.is_self = False - - def __iter__(self): - return iter(self.pages) - - def __getattr__(self, name): - self._load() - try: - item = self._items[name] - except 
KeyError: - raise AttributeError() - - if isinstance(item, Linker): - return item - - return LinkedPageData(item) - - def __str__(self): - return self.name - - @property - def name(self): - return self._source.getBasename(self._dir_path) - - @property - def children(self): - return self._iterItems(0) - - @property - def parent(self): - if self._dir_path == '': - return None - - if self._parent is None: - parent_name = self._source.getBasename(self._dir_path) - parent_dir_path = self._source.getDirpath(self._dir_path) - for is_dir, name, data in self._source.listPath(parent_dir_path): - if not is_dir and name == parent_name: - parent_page = data.buildPage() - item = _LinkedPage(parent_page) - item._linker_info.name = parent_name - item._linker_info.child_linker = Linker( - self._source, parent_dir_path, - root_page_path=self._root_page_path) - self._parent = LinkedPageData(item) - break - else: - self._parent = Linker(self._source, parent_dir_path, - root_page_path=self._root_page_path) - - return self._parent - - @property - def pages(self): - return self._iterItems(0, filter_page_items) + def _getAllSiblings(self): + if self._siblings is None: + self._siblings = list(self._source.getContents( + self._getParentGroup())) + return self._siblings - @property - def directories(self): - return self._iterItems(0, filter_directory_items) - - @property - def all(self): - return self._iterItems() - - @property - def allpages(self): - return self._iterItems(-1, filter_page_items) - - @property - def alldirectories(self): - return self._iterItems(-1, filter_directory_items) - - @property - def root(self): - return self.forpath('/') - - def forpath(self, rel_path): - return Linker(self._source, rel_path, - root_page_path=self._root_page_path) - - def _iterItems(self, max_depth=-1, filter_func=None): - items = walk_linkers(self, max_depth=max_depth, - filter_func=filter_func) - src = LinkerSource(items, self._source) - return PageIterator(src) - - def _load(self): - if 
self._items is not None: - return - - is_listable = isinstance(self._source, IListableSource) - if not is_listable: - raise Exception("Source '%s' can't be listed." % self._source.name) + def _getAllChildren(self): + if self._children is None: + child_group = self._source.getRelatedContents( + self._content_item, REL_LOGICAl_CHILD_GROUP) + if child_group is not None: + self._children = list( + self._source.getContents(child_group)) + else: + self._children = [] + return self._children - items = list(self._source.listPath(self._dir_path)) - self._items = collections.OrderedDict() - for is_dir, name, data in items: - # If `is_dir` is true, `data` will be the directory's source - # path. If not, it will be a page factory. - if is_dir: - item = Linker(self._source, data, - root_page_path=self._root_page_path) - else: - page = data.buildPage() - is_self = (page.rel_path == self._root_page_path) - item = _LinkedPage(page) - item._linker_info.name = name - item._linker_info.is_self = is_self - if is_self: - self._self_item = item + def _getParentGroup(self): + if self._parent_group is _unloaded: + self._parent_group = self._source.getRelatedContents( + self._content_item, REL_PARENT_GROUP) + return self._parent_group - existing = self._items.get(name) - if existing is None: - self._items[name] = item - elif is_dir: - # The current item is a directory. The existing item - # should be a page. - existing._linker_info.child_linker = item - existing._linker_info.is_dir = True - else: - # The current item is a page. The existing item should - # be a directory. 
- item._linker_info.child_linker = existing - item._linker_info.is_dir = True - self._items[name] = item - + def _debugRenderAncestors(self): + return [i.title for i in self.ancestors] -def filter_page_items(item): - return not isinstance(item, Linker) - - -def filter_directory_items(item): - return isinstance(item, Linker) - + def _debugRenderSiblings(self): + return [i.title for i in self.siblings] -def walk_linkers(linker, depth=0, max_depth=-1, filter_func=None): - linker._load() - for item in linker._items.values(): - if not filter_func or filter_func(item): - yield item + def _debugRenderChildren(self): + return [i.title for i in self.children] - if (isinstance(item, Linker) and - (max_depth < 0 or depth + 1 <= max_depth)): - yield from walk_linkers(item, depth + 1, max_depth) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/pagedata.py --- a/piecrust/data/pagedata.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/pagedata.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,8 @@ +import copy import time import logging import collections.abc +from piecrust.sources.base import AbortedSourceUseError logger = logging.getLogger(__name__) @@ -69,14 +71,15 @@ loader = self._loaders.get(name) if loader is not None: try: - self._values[name] = loader(self, name) - except LazyPageConfigLoaderHasNoValue: + with self._page.app.env.stats.timerScope('BuildLazyPageData'): + self._values[name] = loader(self, name) + except (LazyPageConfigLoaderHasNoValue, AbortedSourceUseError): raise except Exception as ex: logger.exception(ex) raise Exception( - "Error while loading attribute '%s' for: %s" % - (name, self._page.rel_path)) from ex + "Error while loading attribute '%s' for: %s" % + (name, self._page.content_spec)) from ex # Forget this loader now that it served its purpose. 
try: @@ -89,18 +92,22 @@ loader = self._loaders.get('*') if loader is not None: try: - self._values[name] = loader(self, name) - except LazyPageConfigLoaderHasNoValue: + with self._page.app.env.stats.timerScope('BuildLazyPageData'): + self._values[name] = loader(self, name) + except (LazyPageConfigLoaderHasNoValue, AbortedSourceUseError): raise except Exception as ex: logger.exception(ex) raise Exception( - "Error while loading attribute '%s' for: %s" % - (name, self._page.rel_path)) from ex + "Error while loading attribute '%s' for: %s" % + (name, self._page.content_spec)) from ex # We always keep the wildcard loader in the loaders list. - return self._values[name] + try: + return self._values[name] + except KeyError: + pass - raise LazyPageConfigLoaderHasNoValue("No such value: %s" % name) + raise LazyPageConfigLoaderHasNoValue() def _setValue(self, name, value): self._values[name] = value @@ -116,12 +123,14 @@ if not override_existing and attr_name in self._loaders: raise Exception( - "A loader has already been mapped for: %s" % attr_name) + "A loader has already been mapped for: %s" % attr_name) self._loaders[attr_name] = loader def _mapValue(self, attr_name, value, override_existing=False): - loader = lambda _, __: value - self._mapLoader(attr_name, loader, override_existing=override_existing) + self._mapLoader( + attr_name, + lambda _, __: value, + override_existing=override_existing) def _ensureLoaded(self): if self._is_loaded: @@ -129,12 +138,13 @@ self._is_loaded = True try: - self._load() + with self._page.app.env.stats.timerScope('BuildLazyPageData'): + self._load() except Exception as ex: logger.exception(ex) raise Exception( - "Error while loading data for: %s" % - self._page.rel_path) from ex + "Error while loading data for: %s" % + self._page.content_spec) from ex def _load(self): pass @@ -152,20 +162,36 @@ """ Template data for a page. 
""" def __init__(self, page, ctx): - super(PageData, self).__init__(page) + super().__init__(page) self._ctx = ctx def _load(self): + from piecrust.uriutil import split_uri + page = self._page + set_val = self._setValue + + page_url = page.getUri(self._ctx.sub_num) + _, rel_url = split_uri(page.app, page_url) + dt = page.datetime for k, v in page.source_metadata.items(): - self._setValue(k, v) - self._setValue('url', self._ctx.uri) - self._setValue('timestamp', time.mktime(dt.timetuple())) - self._setValue('datetime', { + set_val(k, v) + set_val('url', page_url) + set_val('rel_url', rel_url) + set_val('route', copy.deepcopy(page.source_metadata['route_params'])) + + set_val('timestamp', time.mktime(dt.timetuple())) + set_val('datetime', { 'year': dt.year, 'month': dt.month, 'day': dt.day, 'hour': dt.hour, 'minute': dt.minute, 'second': dt.second}) - date_format = page.app.config.get('site/date_format') - if date_format: - self._setValue('date', page.datetime.strftime(date_format)) + + self._mapLoader('date', _load_date) + +def _load_date(data, name): + page = data._page + date_format = page.app.config.get('site/date_format') + if date_format: + return page.datetime.strftime(date_format) + return None diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/paginationdata.py --- a/piecrust/data/paginationdata.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/paginationdata.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,10 +1,8 @@ import copy import time import logging -from piecrust.data.assetor import Assetor from piecrust.data.pagedata import LazyPageConfigData -from piecrust.routing import create_route_metadata -from piecrust.uriutil import split_uri +from piecrust.sources.base import AbortedSourceUseError logger = logging.getLogger(__name__) @@ -12,89 +10,109 @@ class PaginationData(LazyPageConfigData): def __init__(self, page): - super(PaginationData, self).__init__(page) - self._route = None - self._route_metadata = None - - def _get_uri(self): - page = self._page - if 
self._route is None: - # TODO: this is not quite correct, as we're missing parts of the - # route metadata if the current page is a taxonomy page. - route_metadata = create_route_metadata(page) - self._route = page.app.getSourceRoute(page.source.name, route_metadata) - self._route_metadata = route_metadata - if self._route is None: - raise Exception("Can't get route for page: %s" % page.path) - return self._route.getUri(self._route_metadata) + super().__init__(page) def _load(self): + from piecrust.uriutil import split_uri + page = self._page - dt = page.datetime - page_url = self._get_uri() + set_val = self._setValue + + page_url = page.getUri() _, rel_url = split_uri(page.app, page_url) - self._setValue('url', page_url) - self._setValue('rel_url', rel_url) - self._setValue('slug', rel_url) # For backwards compatibility - self._setValue('route', copy.deepcopy(self._route_metadata)) - self._setValue( - 'timestamp', - time.mktime(page.datetime.timetuple())) - self._setValue('datetime', { - 'year': dt.year, 'month': dt.month, 'day': dt.day, - 'hour': dt.hour, 'minute': dt.minute, 'second': dt.second}) - date_format = page.app.config.get('site/date_format') - if date_format: - self._setValue('date', page.datetime.strftime(date_format)) - self._setValue('mtime', page.path_mtime) + set_val('url', page_url) + set_val('rel_url', rel_url) + set_val('slug', rel_url) # For backwards compatibility + set_val('route', copy.deepcopy(page.source_metadata['route_params'])) - assetor = page.source.buildAssetor(page, page_url) - self._setValue('assets', assetor) + self._mapLoader('date', _load_date) + self._mapLoader('datetime', _load_datetime) + self._mapLoader('timestamp', _load_timestamp) + self._mapLoader('mtime', _load_content_mtime) + self._mapLoader('assets', _load_assets) segment_names = page.config.get('segments') for name in segment_names: - self._mapLoader(name, self._load_rendered_segment) + self._mapLoader('raw_' + name, _load_raw_segment) + self._mapLoader(name, 
_load_rendered_segment) + + +def _load_assets(data, name): + from piecrust.data.assetor import Assetor + return Assetor(data._page) + + +def _load_date(data, name): + page = data._page + date_format = page.app.config.get('site/date_format') + if date_format: + return page.datetime.strftime(date_format) + return None + - def _load_rendered_segment(self, data, name): - do_render = True - eis = self._page.app.env.exec_info_stack - if eis is not None and eis.hasPage(self._page): - # This is the pagination data for the page that is currently - # being rendered! Inception! But this is possible... so just - # prevent infinite recursion. - do_render = False +def _load_datetime(data, name): + dt = data._page.datetime + return { + 'year': dt.year, 'month': dt.month, 'day': dt.day, + 'hour': dt.hour, 'minute': dt.minute, 'second': dt.second} + + +def _load_timestamp(data, name): + page = data._page + return time.mktime(page.datetime.timetuple()) + - assert self is data +def _load_content_mtime(data, name): + return data._page.content_mtime + + +def _load_raw_segment(data, name): + page = data._page + return page.getSegment(name[4:]) + + +def _load_rendered_segment(data, name): + page = data._page - if do_render: - uri = self._get_uri() - try: - from piecrust.rendering import ( - QualifiedPage, PageRenderingContext, - render_page_segments) - qp = QualifiedPage(self._page, self._route, - self._route_metadata) - ctx = PageRenderingContext(qp) - render_result = render_page_segments(ctx) - segs = render_result.segments - except Exception as ex: - logger.exception(ex) - raise Exception( - "Error rendering segments for '%s'" % uri) from ex - else: - segs = {} - for name in self._page.config.get('segments'): - segs[name] = "" + do_render = True + stack = page.app.env.render_ctx_stack + if stack.hasPage(page): + # This is the pagination data for the page that is currently + # being rendered! Inception! But this is possible... so just + # prevent infinite recursion. 
+ do_render = False - for k, v in segs.items(): - self._unmapLoader(k) - self._setValue(k, v) + if do_render: + uri = page.getUri() + try: + from piecrust.rendering import ( + RenderingContext, render_page_segments) + ctx = RenderingContext(page) + render_result = render_page_segments(ctx) + segs = render_result.segments + except AbortedSourceUseError: + raise + except Exception as ex: + logger.exception(ex) + raise Exception( + "Error rendering segments for '%s'" % uri) from ex + else: + segs = {} + for name in page.config.get('segments'): + segs[name] = "" - if 'content.abstract' in segs: - self._setValue('content', segs['content.abstract']) - self._setValue('has_more', True) - if name == 'content': - return segs['content.abstract'] + unmap_loader = data._unmapLoader + set_val = data._setValue + + for k, v in segs.items(): + unmap_loader(k) + set_val(k, v) - return segs[name] + if 'content.abstract' in segs: + set_val('content', segs['content.abstract']) + set_val('has_more', True) + if name == 'content': + return segs['content.abstract'] + return segs[name] + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/paginator.py --- a/piecrust/data/paginator.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/paginator.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,9 +1,7 @@ import math import logging from werkzeug.utils import cached_property -from piecrust.data.filters import PaginationFilter, page_value_accessor -from piecrust.data.iterators import PageIterator -from piecrust.sources.interfaces import IPaginationSource +from piecrust.sources.base import ContentSource logger = logging.getLogger(__name__) @@ -11,27 +9,28 @@ class Paginator(object): debug_render = [ - 'has_more', 'items', 'has_items', 'items_per_page', - 'items_this_page', 'prev_page_number', 'this_page_number', - 'next_page_number', 'prev_page', 'next_page', - 'total_item_count', 'total_page_count', - 'next_item', 'prev_item'] + 'has_more', 'items', 'has_items', 'items_per_page', + 'items_this_page', 
'prev_page_number', 'this_page_number', + 'next_page_number', 'prev_page', 'next_page', + 'total_item_count', 'total_page_count', + 'next_item', 'prev_item'] debug_render_invoke = [ - 'has_more', 'items', 'has_items', 'items_per_page', - 'items_this_page', 'prev_page_number', 'this_page_number', - 'next_page_number', 'prev_page', 'next_page', - 'total_item_count', 'total_page_count', - 'next_item', 'prev_item'] + 'has_more', 'items', 'has_items', 'items_per_page', + 'items_this_page', 'prev_page_number', 'this_page_number', + 'next_page_number', 'prev_page', 'next_page', + 'total_item_count', 'total_page_count', + 'next_item', 'prev_item'] - def __init__(self, qualified_page, source, *, - page_num=1, pgn_filter=None, items_per_page=-1): - self._parent_page = qualified_page + def __init__(self, source, current_page, sub_num, *, + pgn_filter=None, items_per_page=-1): self._source = source - self._page_num = page_num + self._page = current_page + self._sub_num = sub_num self._iterator = None self._pgn_filter = pgn_filter self._items_per_page = items_per_page self._pgn_set_on_ctx = False + self._is_content_source = isinstance(source, ContentSource) @property def is_loaded(self): @@ -88,12 +87,17 @@ def items_per_page(self): if self._items_per_page > 0: return self._items_per_page - if self._parent_page: - ipp = self._parent_page.config.get('items_per_page') + + if self._page is not None: + ipp = self._page.config.get('items_per_page') if ipp is not None: return ipp - if isinstance(self._source, IPaginationSource): - return self._source.getItemsPerPage() + + if self._is_content_source: + ipp = self._source.config.get('items_per_page') + if ipp is not None: + return ipp + raise Exception("No way to figure out how many items to display " "per page.") @@ -104,19 +108,19 @@ @property def prev_page_number(self): - if self._page_num > 1: - return self._page_num - 1 + if self._sub_num > 1: + return self._sub_num - 1 return None @property def this_page_number(self): - return 
self._page_num + return self._sub_num @property def next_page_number(self): self._load() if self._iterator._has_more: - return self._page_num + 1 + return self._sub_num + 1 return None @property @@ -128,7 +132,7 @@ @property def this_page(self): - return self._getPageUri(self._page_num) + return self._getPageUri(self._sub_num) @property def next_page(self): @@ -166,8 +170,8 @@ if radius <= 0 or total_page_count < (2 * radius + 1): return list(range(1, total_page_count + 1)) - first_num = self._page_num - radius - last_num = self._page_num + radius + first_num = self._sub_num - radius + last_num = self._sub_num + radius if first_num <= 0: last_num += 1 - first_num first_num = 1 @@ -185,42 +189,36 @@ if self._iterator is not None: return - if self._source is None: - raise Exception("Can't load pagination data: no source has " - "been defined.") + from piecrust.data.filters import PaginationFilter + from piecrust.dataproviders.pageiterator import ( + PageIterator, HardCodedFilterIterator) - pag_filter = self._getPaginationFilter() - offset = (self._page_num - 1) * self.items_per_page - current_page = None - if self._parent_page: - current_page = self._parent_page.page self._iterator = PageIterator( - self._source, - current_page=current_page, - pagination_filter=pag_filter, - offset=offset, limit=self.items_per_page, - locked=True) - self._iterator._iter_event += self._onIteration - - def _getPaginationFilter(self): - f = PaginationFilter(value_accessor=page_value_accessor) + self._source, + current_page=self._page) if self._pgn_filter is not None: - f.addClause(self._pgn_filter.root_clause) + pag_fil = PaginationFilter() + pag_fil.addClause(self._pgn_filter.root_clause) + self._iterator._simpleNonSortedWrap( + HardCodedFilterIterator, pag_fil) - if self._parent_page and isinstance(self._source, IPaginationSource): - sf = self._source.getPaginationFilter(self._parent_page) - if sf is not None: - f.addClause(sf.root_clause) + offset = (self._sub_num - 1) * 
self.items_per_page + limit = self.items_per_page + self._iterator.slice(offset, limit) - return f + if self._is_content_source: + self._iterator._iter_event += self._onIteration + + self._iterator._lockIterator() def _getPageUri(self, index): - return self._parent_page.getUri(index) + return self._page.getUri(index) - def _onIteration(self): - if self._parent_page is not None and not self._pgn_set_on_ctx: - eis = self._parent_page.app.env.exec_info_stack - eis.current_page_info.render_ctx.setPagination(self) - self._pgn_set_on_ctx = True + def _onIteration(self, it): + if not self._pgn_set_on_ctx: + rcs = self._source.app.env.render_ctx_stack + if rcs.current_ctx is not None: + rcs.current_ctx.setPagination(self) + self._pgn_set_on_ctx = True diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/piecrustdata.py --- a/piecrust/data/piecrustdata.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/piecrustdata.py Tue Nov 21 22:07:12 2017 -0800 @@ -15,7 +15,7 @@ self.version = APP_VERSION self.url = 'http://bolt80.com/piecrust/' self.branding = 'Baked with PieCrust %s.' 
% ( - 'http://bolt80.com/piecrust/', APP_VERSION) + 'http://bolt80.com/piecrust/', APP_VERSION) self._page = None @property diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/provider.py --- a/piecrust/data/provider.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,274 +0,0 @@ -import time -import collections.abc -from piecrust.configuration import ConfigurationError -from piecrust.data.iterators import PageIterator -from piecrust.generation.taxonomy import Taxonomy -from piecrust.sources.array import ArraySource - - -def get_data_provider_class(app, provider_type): - if not provider_type: - raise Exception("No data provider type specified.") - for prov in app.plugin_loader.getDataProviders(): - if prov.PROVIDER_NAME == provider_type: - return prov - raise ConfigurationError( - "Unknown data provider type: %s" % provider_type) - - -class DataProvider(object): - debug_render_dynamic = [] - debug_render_invoke_dynamic = [] - - def __init__(self, source, page, override): - if source.app is not page.app: - raise Exception("The given source and page don't belong to " - "the same application.") - self._source = source - self._page = page - - -class IteratorDataProvider(DataProvider): - PROVIDER_NAME = 'iterator' - - debug_render_doc_dynamic = ['_debugRenderDoc'] - debug_render_not_empty = True - - def __init__(self, source, page, override): - super(IteratorDataProvider, self).__init__(source, page, override) - - self._innerIt = None - if isinstance(override, IteratorDataProvider): - # Iterator providers can be chained, like for instance with - # `site.pages` listing both the theme pages and the user site's - # pages. 
- self._innerIt = override - - self._pages = PageIterator(source, current_page=page) - self._pages._iter_event += self._onIteration - self._ctx_set = False - - def __len__(self): - return len(self._pages) - - def __getitem__(self, key): - return self._pages[key] - - def __iter__(self): - yield from iter(self._pages) - if self._innerIt: - yield from self._innerIt - - def _onIteration(self): - if not self._ctx_set: - eis = self._page.app.env.exec_info_stack - eis.current_page_info.render_ctx.addUsedSource(self._source.name) - self._ctx_set = True - - def _debugRenderDoc(self): - return 'Provides a list of %d items' % len(self) - - -class BlogDataProvider(DataProvider, collections.abc.Mapping): - PROVIDER_NAME = 'blog' - - debug_render_doc = """Provides a list of blog posts and yearly/monthly - archives.""" - debug_render_dynamic = (['_debugRenderTaxonomies'] + - DataProvider.debug_render_dynamic) - - def __init__(self, source, page, override): - super(BlogDataProvider, self).__init__(source, page, override) - self._yearly = None - self._monthly = None - self._taxonomies = {} - self._ctx_set = False - - @property - def posts(self): - return self._posts() - - @property - def years(self): - return self._buildYearlyArchive() - - @property - def months(self): - return self._buildMonthlyArchive() - - def __getitem__(self, name): - if name == 'posts': - return self._posts() - elif name == 'years': - return self._buildYearlyArchive() - elif name == 'months': - return self._buildMonthlyArchive() - - if self._source.app.config.get('site/taxonomies/' + name) is not None: - return self._buildTaxonomy(name) - - raise KeyError("No such item: %s" % name) - - def __iter__(self): - keys = ['posts', 'years', 'months'] - keys += list(self._source.app.config.get('site/taxonomies').keys()) - return iter(keys) - - def __len__(self): - return 3 + len(self._source.app.config.get('site/taxonomies')) - - def _debugRenderTaxonomies(self): - return 
list(self._source.app.config.get('site/taxonomies').keys()) - - def _posts(self): - it = PageIterator(self._source, current_page=self._page) - it._iter_event += self._onIteration - return it - - def _buildYearlyArchive(self): - if self._yearly is not None: - return self._yearly - - self._yearly = [] - yearly_index = {} - for post in self._source.getPages(): - year = post.datetime.strftime('%Y') - - posts_this_year = yearly_index.get(year) - if posts_this_year is None: - timestamp = time.mktime( - (post.datetime.year, 1, 1, 0, 0, 0, 0, 0, -1)) - posts_this_year = BlogArchiveEntry(self._page, year, timestamp) - self._yearly.append(posts_this_year) - yearly_index[year] = posts_this_year - - posts_this_year._data_source.append(post) - self._yearly = sorted(self._yearly, - key=lambda e: e.timestamp, - reverse=True) - self._onIteration() - return self._yearly - - def _buildMonthlyArchive(self): - if self._monthly is not None: - return self._monthly - - self._monthly = [] - for post in self._source.getPages(): - month = post.datetime.strftime('%B %Y') - - posts_this_month = next( - filter(lambda m: m.name == month, self._monthly), - None) - if posts_this_month is None: - timestamp = time.mktime( - (post.datetime.year, post.datetime.month, 1, - 0, 0, 0, 0, 0, -1)) - posts_this_month = BlogArchiveEntry(self._page, month, timestamp) - self._monthly.append(posts_this_month) - - posts_this_month._data_source.append(post) - self._monthly = sorted(self._monthly, - key=lambda e: e.timestamp, - reverse=True) - self._onIteration() - return self._monthly - - def _buildTaxonomy(self, tax_name): - if tax_name in self._taxonomies: - return self._taxonomies[tax_name] - - tax_cfg = self._page.app.config.get('site/taxonomies/' + tax_name) - tax = Taxonomy(tax_name, tax_cfg) - - posts_by_tax_value = {} - for post in self._source.getPages(): - tax_values = post.config.get(tax.setting_name) - if tax_values is None: - continue - if not isinstance(tax_values, list): - tax_values = [tax_values] 
- for val in tax_values: - posts = posts_by_tax_value.setdefault(val, []) - posts.append(post) - - entries = [] - for value, ds in posts_by_tax_value.items(): - source = ArraySource(self._page.app, ds) - entries.append(BlogTaxonomyEntry(self._page, source, value)) - self._taxonomies[tax_name] = sorted(entries, key=lambda k: k.name) - - self._onIteration() - return self._taxonomies[tax_name] - - def _onIteration(self): - if not self._ctx_set: - eis = self._page.app.env.exec_info_stack - if eis.current_page_info: - eis.current_page_info.render_ctx.addUsedSource(self._source) - self._ctx_set = True - - -class BlogArchiveEntry(object): - debug_render = ['name', 'timestamp', 'posts'] - debug_render_invoke = ['name', 'timestamp', 'posts'] - - def __init__(self, page, name, timestamp): - self.name = name - self.timestamp = timestamp - self._page = page - self._data_source = [] - self._iterator = None - - def __str__(self): - return self.name - - def __int__(self): - return int(self.name) - - @property - def posts(self): - self._load() - self._iterator.reset() - return self._iterator - - def _load(self): - if self._iterator is not None: - return - source = ArraySource(self._page.app, self._data_source) - self._iterator = PageIterator(source, current_page=self._page) - - -class BlogTaxonomyEntry(object): - debug_render = ['name', 'post_count', 'posts'] - debug_render_invoke = ['name', 'post_count', 'posts'] - - def __init__(self, page, source, property_value): - self._page = page - self._source = source - self._property_value = property_value - self._iterator = None - - def __str__(self): - return self._property_value - - @property - def name(self): - return self._property_value - - @property - def posts(self): - self._load() - self._iterator.reset() - return self._iterator - - @property - def post_count(self): - return self._source.page_count - - def _load(self): - if self._iterator is not None: - return - - self._iterator = PageIterator(self._source, 
current_page=self._page) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/data/providersdata.py --- a/piecrust/data/providersdata.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/data/providersdata.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,5 +1,8 @@ import re import collections.abc +from piecrust.configuration import ConfigurationError +from piecrust.dataproviders.base import ( + DataProvider, build_data_provider) re_endpoint_sep = re.compile(r'[\/\.]') @@ -27,15 +30,36 @@ return self._dict = {} - for source in self._page.app.sources + self._page.app.generators: - if source.data_endpoint: - endpoint_bits = re_endpoint_sep.split(source.data_endpoint) - endpoint = self._dict - for e in endpoint_bits[:-1]: - if e not in endpoint: - endpoint[e] = {} - endpoint = endpoint[e] - override = endpoint.get(endpoint_bits[-1]) - provider = source.buildDataProvider(self._page, override) - if provider is not None: - endpoint[endpoint_bits[-1]] = provider + for source in self._page.app.sources: + pname = source.config.get('data_type') or 'page_iterator' + pendpoint = source.config.get('data_endpoint') + if not pname or not pendpoint: + continue + + endpoint_bits = re_endpoint_sep.split(pendpoint) + endpoint = self._dict + for e in endpoint_bits[:-1]: + if e not in endpoint: + endpoint[e] = {} + endpoint = endpoint[e] + existing = endpoint.get(endpoint_bits[-1]) + + if existing is None: + provider = build_data_provider(pname, source, self._page) + endpoint[endpoint_bits[-1]] = provider + elif isinstance(existing, DataProvider): + existing_source = existing._sources[0] + if (existing.PROVIDER_NAME != pname or + existing_source.SOURCE_NAME != source.SOURCE_NAME): + raise ConfigurationError( + "Can't combine data providers '%s' and '%s' " + "(using sources '%s' and '%s') " + "on endpoint '%s'." 
% + (existing.PROVIDER_NAME, pname, + existing_source.SOURCE_NAME, source.SOURCE_NAME, + pendpoint)) + existing._addSource(source) + else: + raise ConfigurationError( + "Endpoint '%s' can't be used for a data provider because " + "it's already used for something else." % pendpoint) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/dataproviders/__init__.py diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/dataproviders/base.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/dataproviders/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,34 @@ +from piecrust.configuration import ConfigurationError + + +class DataProvider: + """ The base class for a data provider. + """ + PROVIDER_NAME = None + + debug_render_dynamic = [] + debug_render_invoke_dynamic = [] + + def __init__(self, source, page): + self._sources = [source] + self._page = page + self._app = source.app + + def _addSource(self, source): + self._sources.append(source) + + +def build_data_provider(provider_type, source, page): + if not provider_type: + raise Exception("No data provider type specified.") + + for p in page.app.plugin_loader.getDataProviders(): + if p.PROVIDER_NAME == provider_type: + pclass = p + break + else: + raise ConfigurationError("Unknown data provider type: %s" % + provider_type) + + return pclass(source, page) + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/dataproviders/blog.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/dataproviders/blog.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,229 @@ +import time +import collections.abc +from piecrust.dataproviders.base import DataProvider +from piecrust.dataproviders.pageiterator import PageIterator +from piecrust.sources.list import ListSource +from piecrust.sources.taxonomy import Taxonomy + + +class BlogDataProvider(DataProvider, collections.abc.Mapping): + PROVIDER_NAME = 'blog' + + debug_render_doc = """Provides a list of blog posts and yearly/monthly + archives.""" + debug_render_dynamic = (['_debugRenderTaxonomies'] 
+ + DataProvider.debug_render_dynamic) + + def __init__(self, source, page): + super().__init__(source, page) + self._posts = None + self._yearly = None + self._monthly = None + self._taxonomies = {} + self._archives_built = False + self._ctx_set = False + + def _addSource(self, source): + raise Exception("The blog data provider doesn't support " + "combining multiple sources.") + + @property + def posts(self): + self._buildPosts() + return self._posts + + @property + def years(self): + self._buildArchives() + return self._yearly + + @property + def months(self): + self._buildArchives() + return self._monthly + + def __getitem__(self, name): + self._buildArchives() + return self._taxonomies[name] + + def __getattr__(self, name): + self._buildArchives() + try: + return self._taxonomies[name] + except KeyError: + raise AttributeError("No such taxonomy: %s" % name) + + def __iter__(self): + self._buildPosts() + self._buildArchives() + return iter(['posts', 'years', 'months'] + list( + sorted(self._taxonomies.keys()))) + + def __len__(self): + self._buildPosts() + self._buildArchives() + return 3 + len(self._taxonomies) + + def _debugRenderTaxonomies(self): + return list(self._app.config.get('site/taxonomies').keys()) + + def _buildPosts(self): + if self._posts is None: + it = PageIterator(self._sources[0], current_page=self._page) + it._load_event += self._onIteration + self._posts = it + + def _buildArchives(self): + if self._archives_built: + return + + yearly_index = {} + monthly_index = {} + tax_index = {} + + taxonomies = [] + tax_names = list(self._app.config.get('site/taxonomies').keys()) + for tn in tax_names: + tax_cfg = self._app.config.get('site/taxonomies/' + tn) + taxonomies.append(Taxonomy(tn, tax_cfg)) + tax_index[tn] = {} + + page = self._page + source = self._sources[0] + + for post in source.getAllPages(): + post_dt = post.datetime + + year = post_dt.year + month = (post_dt.month, post_dt.year) + + posts_this_year = yearly_index.get(year) + if 
posts_this_year is None: + timestamp = time.mktime( + (post_dt.year, 1, 1, 0, 0, 0, 0, 0, -1)) + posts_this_year = BlogArchiveEntry( + source, page, year, timestamp) + yearly_index[year] = posts_this_year + posts_this_year._items.append(post.content_item) + + posts_this_month = monthly_index.get(month) + if posts_this_month is None: + timestamp = time.mktime( + (post_dt.year, post_dt.month, 1, + 0, 0, 0, 0, 0, -1)) + posts_this_month = BlogArchiveEntry( + source, page, month[0], timestamp) + monthly_index[month] = posts_this_month + posts_this_month._items.append(post.content_item) + + for tax in taxonomies: + post_term = post.config.get(tax.setting_name) + if post_term is None: + continue + + posts_this_tax = tax_index[tax.name] + if tax.is_multiple: + for val in post_term: + entry = posts_this_tax.get(val) + if entry is None: + entry = BlogTaxonomyEntry(source, page, val) + posts_this_tax[val] = entry + entry._items.append(post.content_item) + else: + entry = posts_this_tax.get(post_term) + if entry is None: + entry = BlogTaxonomyEntry(source, page, post_term) + posts_this_tax[post_term] = entry + entry._items.append(post.content_item) + + self._yearly = list(sorted( + yearly_index.values(), + key=lambda e: e.timestamp, reverse=True)) + self._monthly = list(sorted( + monthly_index.values(), + key=lambda e: e.timestamp, reverse=True)) + + self._taxonomies = {} + for tax_name, entries in tax_index.items(): + self._taxonomies[tax_name] = list( + sorted(entries.values(), key=lambda i: i.term)) + + self._onIteration(None) + + self._archives_built = True + + def _onIteration(self, it): + if not self._ctx_set: + rcs = self._app.env.render_ctx_stack + if rcs.current_ctx: + rcs.current_ctx.addUsedSource(self._sources[0]) + self._ctx_set = True + + +class BlogArchiveEntry: + debug_render = ['name', 'timestamp', 'posts'] + debug_render_invoke = ['name', 'timestamp', 'posts'] + + def __init__(self, source, page, name, timestamp): + self.name = name + self.timestamp = timestamp + 
self._source = source + self._page = page + self._items = [] + self._iterator = None + + def __str__(self): + return str(self.name) + + def __int__(self): + return int(self.name) + + @property + def posts(self): + self._load() + self._iterator.reset() + return self._iterator + + def _load(self): + if self._iterator is not None: + return + + src = ListSource(self._source, self._items) + self._iterator = PageIterator(src, current_page=self._page) + + +class BlogTaxonomyEntry: + debug_render = ['name', 'post_count', 'posts'] + debug_render_invoke = ['name', 'post_count', 'posts'] + + def __init__(self, source, page, term): + self.term = term + self._source = source + self._page = page + self._items = [] + self._iterator = None + + def __str__(self): + return self.term + + @property + def name(self): + return self.term + + @property + def posts(self): + self._load() + self._iterator.reset() + return self._iterator + + @property + def post_count(self): + return len(self._items) + + def _load(self): + if self._iterator is not None: + return + + src = ListSource(self._source, self._items) + self._iterator = PageIterator(src, current_page=self._page) + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/dataproviders/pageiterator.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/dataproviders/pageiterator.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,445 @@ +import logging +from piecrust.data.filters import PaginationFilter +from piecrust.data.paginationdata import PaginationData +from piecrust.events import Event +from piecrust.dataproviders.base import DataProvider +from piecrust.sources.base import ContentSource + + +logger = logging.getLogger(__name__) + + +class _CombinedSource: + def __init__(self, sources): + self.sources = sources + self.app = sources[0].app + self.name = None + + # This is for recursive traversal of the iterator chain. + # See later in `PageIterator`. 
+ self.it = None + + def __iter__(self): + sources = self.sources + + if len(sources) == 1: + source = sources[0] + self.name = source.name + yield from source.getAllPages() + self.name = None + return + + # Return the pages from all the combined sources, but skip + # those that are "overridden" -- e.g. a theme page that gets + # replaced by a user page of the same name. + used_uris = set() + for source in sources: + self.name = source.name + for page in source.getAllPages(): + page_uri = page.getUri() + if page_uri not in used_uris: + used_uris.add(page_uri) + yield page + + self.name = None + + +class PageIteratorDataProvider(DataProvider): + """ A data provider that reads a content source as a list of pages. + + This class supports wrapping another `PageIteratorDataProvider` + instance because several sources may want to be merged under the + same data endpoint (e.g. `site.pages` which lists both the user + pages and the theme pages). + """ + PROVIDER_NAME = 'page_iterator' + + debug_render_doc_dynamic = ['_debugRenderDoc'] + debug_render_not_empty = True + + def __init__(self, source, page): + super().__init__(source, page) + self._app = source.app + self._it = None + self._iterated = False + + def __len__(self): + self._load() + return len(self._it) + + def __iter__(self): + self._load() + yield from self._it + + def _load(self): + if self._it is not None: + return + + combined_source = _CombinedSource(list(reversed(self._sources))) + self._it = PageIterator(combined_source, current_page=self._page) + self._it._load_event += self._onIteration + + def _onIteration(self, it): + if not self._iterated: + rcs = self._app.env.render_ctx_stack + rcs.current_ctx.addUsedSource(it._source) + self._iterated = True + + def _addSource(self, source): + if self._it is not None: + raise Exception("Can't add sources after the data provider " + "has been loaded.") + super()._addSource(source) + + def _debugRenderDoc(self): + return 'Provides a list of %d items' % len(self) + + 
+class PageIterator: + def __init__(self, source, *, current_page=None): + self._source = source + self._is_content_source = isinstance( + source, (ContentSource, _CombinedSource)) + self._cache = None + self._pagination_slicer = None + self._has_sorter = False + self._next_page = None + self._prev_page = None + self._locked = False + self._load_event = Event() + self._iter_event = Event() + self._current_page = current_page + self._initIterator() + + @property + def total_count(self): + self._load() + if self._pagination_slicer is not None: + return self._pagination_slicer.inner_count + return len(self._cache) + + @property + def next_page(self): + self._load() + return self._next_page + + @property + def prev_page(self): + self._load() + return self._prev_page + + def __len__(self): + self._load() + return len(self._cache) + + def __getitem__(self, key): + self._load() + return self._cache[key] + + def __iter__(self): + self._load() + self._iter_event.fire(self) + return iter(self._cache) + + def __getattr__(self, name): + if name[:3] == 'is_' or name[:3] == 'in_': + def is_filter(value): + conf = {'is_%s' % name[3:]: value} + return self._simpleNonSortedWrap(SettingFilterIterator, conf) + return is_filter + + if name[:4] == 'has_': + def has_filter(value): + conf = {name: value} + return self._simpleNonSortedWrap(SettingFilterIterator, conf) + return has_filter + + if name[:5] == 'with_': + def has_filter(value): + conf = {'has_%s' % name[5:]: value} + return self._simpleNonSortedWrap(SettingFilterIterator, conf) + return has_filter + + return self.__getattribute__(name) + + def skip(self, count): + return self._simpleWrap(SliceIterator, count) + + def limit(self, count): + return self._simpleWrap(SliceIterator, 0, count) + + def slice(self, skip, limit): + return self._simpleWrap(SliceIterator, skip, limit) + + def filter(self, filter_name): + if self._current_page is None: + raise Exception("Can't use `filter()` because no parent page was " + "set for this 
page iterator.") + filter_conf = self._current_page.config.get(filter_name) + if filter_conf is None: + raise Exception("Couldn't find filter '%s' in the configuration " + "header for page: %s" % + (filter_name, self._current_page.path)) + return self._simpleNonSortedWrap(SettingFilterIterator, filter_conf) + + def sort(self, setting_name=None, reverse=False): + if setting_name: + self._wrapAsSort(SettingSortIterator, setting_name, reverse) + else: + self._wrapAsSort(NaturalSortIterator, reverse) + return self + + def reset(self): + self._ensureUnlocked() + self._unload() + return self + + @property + def _is_loaded(self): + return self._cache is not None + + @property + def _has_more(self): + self._load() + if self._pagination_slicer: + return self._pagination_slicer.has_more + return False + + def _simpleWrap(self, it_class, *args, **kwargs): + self._ensureUnlocked() + self._ensureUnloaded() + self._ensureSorter() + self._it = it_class(self._it, *args, **kwargs) + if self._pagination_slicer is None and it_class is SliceIterator: + self._pagination_slicer = self._it + self._pagination_slicer.current_page = self._current_page + return self + + def _simpleNonSortedWrap(self, it_class, *args, **kwargs): + self._ensureUnlocked() + self._ensureUnloaded() + self._it = it_class(self._it, *args, **kwargs) + return self + + def _wrapAsSort(self, sort_it_class, *args, **kwargs): + self._ensureUnlocked() + self._ensureUnloaded() + self._it = sort_it_class(self._it, *args, **kwargs) + self._has_sorter = True + return self + + def _lockIterator(self): + self._ensureUnlocked() + self._locked = True + + def _ensureUnlocked(self): + if self._locked: + raise Exception( + "This page iterator has been locked and can't be modified.") + + def _ensureUnloaded(self): + if self._cache: + raise Exception( + "This page iterator has already been iterated upon and " + "can't be modified anymore.") + + def _ensureSorter(self): + if self._has_sorter: + return + if self._is_content_source: + # 
For content sources, the default sorting is reverse + # date/time sorting. + self._it = DateSortIterator(self._it, reverse=True) + self._has_sorter = True + + def _initIterator(self): + if self._is_content_source: + if isinstance(self._source, _CombinedSource): + self._it = self._source + else: + self._it = PageContentSourceIterator(self._source) + + app = self._source.app + if app.config.get('baker/is_baking'): + # While baking, automatically exclude any page with + # the `draft` setting. + draft_setting = app.config['baker/no_bake_setting'] + self._it = NoDraftsIterator(self._it, draft_setting) + else: + self._it = GenericSourceIterator(self._source) + + def _unload(self): + self._initIterator() + self._cache = None + self._paginationSlicer = None + self._has_sorter = False + self._next_page = None + self._prev_page = None + + def _load(self): + if self._cache is not None: + return + + self._ensureSorter() + + if self._is_content_source: + self._it = PaginationDataBuilderIterator(self._it) + + self._cache = list(self._it) + + if (self._current_page is not None and + self._pagination_slicer is not None): + pn = [self._pagination_slicer.prev_page, + self._pagination_slicer.next_page] + pn_it = PaginationDataBuilderIterator(iter(pn)) + self._prev_page, self._next_page = (list(pn_it)) + + self._load_event.fire(self) + + def _debugRenderDoc(self): + return "Contains %d items" % len(self) + + +class SettingFilterIterator: + def __init__(self, it, fil_conf): + self.it = it + self.fil_conf = fil_conf + self._fil = None + + def __iter__(self): + if self._fil is None: + self._fil = PaginationFilter() + self._fil.addClausesFromConfig(self.fil_conf) + + for i in self.it: + if self._fil.pageMatches(i): + yield i + + +class HardCodedFilterIterator: + def __init__(self, it, fil): + self.it = it + self._fil = fil + + def __iter__(self): + for i in self.it: + if self._fil.pageMatches(i): + yield i + + +class SliceIterator: + def __init__(self, it, offset=0, limit=-1): + self.it = 
it + self.offset = offset + self.limit = limit + self.current_page = None + self.has_more = False + self.inner_count = -1 + self.next_page = None + self.prev_page = None + self._cache = None + + def __iter__(self): + if self._cache is None: + inner_list = list(self.it) + self.inner_count = len(inner_list) + + if self.limit > 0: + self.has_more = self.inner_count > (self.offset + self.limit) + self._cache = inner_list[self.offset:self.offset + self.limit] + else: + self.has_more = False + self._cache = inner_list[self.offset:] + + if self.current_page: + try: + idx = inner_list.index(self.current_page) + except ValueError: + idx = -1 + if idx >= 0: + if idx < self.inner_count - 1: + self.next_page = inner_list[idx + 1] + if idx > 0: + self.prev_page = inner_list[idx - 1] + + return iter(self._cache) + + +class NaturalSortIterator: + def __init__(self, it, reverse=False): + self.it = it + self.reverse = reverse + + def __iter__(self): + return iter(sorted(self.it, reverse=self.reverse)) + + +class SettingSortIterator: + def __init__(self, it, name, reverse=False): + self.it = it + self.name = name + self.reverse = reverse + + def __iter__(self): + return iter(sorted(self.it, key=self._key_getter, + reverse=self.reverse)) + + def _key_getter(self, item): + key = item.config.get(self.name) + if key is None: + return 0 + return key + + +class DateSortIterator: + def __init__(self, it, reverse=True): + self.it = it + self.reverse = reverse + + def __iter__(self): + return iter(sorted(self.it, + key=lambda x: x.datetime, reverse=self.reverse)) + + +class PageContentSourceIterator: + def __init__(self, source): + self.source = source + + # This is to permit recursive traversal of the + # iterator chain. It acts as the end. 
+ self.it = None + + def __iter__(self): + source = self.source + yield from source.getAllPages() + + +class NoDraftsIterator: + def __init__(self, source, no_draft_setting): + self.it = source + self.no_draft_setting = no_draft_setting + + def __iter__(self): + nds = self.no_draft_setting + yield from filter(lambda i: not i.config.get(nds), self.it) + + +class PaginationDataBuilderIterator: + def __init__(self, it): + self.it = it + + def __iter__(self): + for page in self.it: + if page is not None: + yield PaginationData(page) + else: + yield None + + +class GenericSourceIterator: + def __init__(self, source): + self.source = source + self.it = None + + def __iter__(self): + yield from self.source diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/environment.py --- a/piecrust/environment.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/environment.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,68 +1,23 @@ import time import logging import contextlib -from piecrust.cache import MemCache logger = logging.getLogger(__name__) -class AbortedSourceUseError(Exception): - pass - - -class ExecutionInfo(object): - def __init__(self, page, render_ctx): - self.page = page - self.render_ctx = render_ctx - self.was_cache_valid = False - self.start_time = time.perf_counter() - - -class ExecutionInfoStack(object): - def __init__(self): - self._page_stack = [] - - @property - def current_page_info(self): - if len(self._page_stack) == 0: - return None - return self._page_stack[-1] - - @property - def is_main_page(self): - return len(self._page_stack) == 1 - - def hasPage(self, page): - for ei in self._page_stack: - if ei.page == page: - return True - return False - - def pushPage(self, page, render_ctx): - if len(self._page_stack) > 0: - top = self._page_stack[-1] - assert top.page is not page - self._page_stack.append(ExecutionInfo(page, render_ctx)) - - def popPage(self): - del self._page_stack[-1] - - def clear(self): - self._page_stack = [] - - -class ExecutionStats(object): +class 
ExecutionStats: def __init__(self): self.timers = {} self.counters = {} self.manifests = {} - def registerTimer(self, category, *, raise_if_registered=True): + def registerTimer(self, category, *, + raise_if_registered=True, time=0): if raise_if_registered and category in self.timers: raise Exception("Timer '%s' has already been registered." % category) - self.timers[category] = 0 + self.timers[category] = time @contextlib.contextmanager def timerScope(self, category): @@ -105,84 +60,53 @@ v = self.manifests.setdefault(oc, []) self.manifests[oc] = v + ov + def toData(self): + return { + 'timers': self.timers.copy(), + 'counters': self.counters.copy(), + 'manifests': self.manifests.copy()} -class Environment(object): + def fromData(self, data): + self.timers = data['timers'] + self.counters = data['counters'] + self.manifests = data['manifests'] + + +class Environment: def __init__(self): + from piecrust.cache import MemCache + from piecrust.rendering import RenderingContextStack + self.app = None self.start_time = None - self.exec_info_stack = ExecutionInfoStack() self.was_cache_cleaned = False - self.base_asset_url_format = '%uri%' self.page_repository = MemCache() self.rendered_segments_repository = MemCache() - self.fs_caches = { - 'renders': self.rendered_segments_repository} + self.render_ctx_stack = RenderingContextStack() self.fs_cache_only_for_main_page = False self.abort_source_use = False - self._default_layout_extensions = None self._stats = ExecutionStats() @property - def default_layout_extensions(self): - if self._default_layout_extensions is not None: - return self._default_layout_extensions - - if self.app is None: - raise Exception("This environment has not been initialized yet.") - - from piecrust.rendering import get_template_engine - dte = get_template_engine(self.app, None) - self._default_layout_extensions = ['.' 
+ e.lstrip('.') - for e in dte.EXTENSIONS] - return self._default_layout_extensions + def stats(self): + return self._stats def initialize(self, app): self.app = app self.start_time = time.perf_counter() - self.exec_info_stack.clear() - self.was_cache_cleaned = False - self.base_asset_url_format = '%uri%' - for name, repo in self.fs_caches.items(): - cache = app.cache.getCache(name) - repo.fs_cache = cache - - def registerTimer(self, category, *, raise_if_registered=True): - self._stats.registerTimer( - category, raise_if_registered=raise_if_registered) - - def timerScope(self, category): - return self._stats.timerScope(category) - - def stepTimer(self, category, value): - self._stats.stepTimer(category, value) + self.rendered_segments_repository.fs_cache = \ + app.cache.getCache('renders') - def stepTimerSince(self, category, since): - self._stats.stepTimerSince(category, since) - - def registerCounter(self, category, *, raise_if_registered=True): - self._stats.registerCounter( - category, raise_if_registered=raise_if_registered) - - def stepCounter(self, category, inc=1): - self._stats.stepCounter(category, inc) - - def registerManifest(self, name, *, raise_if_registered=True): - self._stats.registerManifest( - name, raise_if_registered=raise_if_registered) - - def addManifestEntry(self, name, entry): - self._stats.addManifestEntry(name, entry) - - def getStats(self): + def _mergeCacheStats(self): repos = [ - ('RenderedSegmentsRepo', self.rendered_segments_repository), - ('PagesRepo', self.page_repository)] + ('RenderedSegmentsRepo', self.rendered_segments_repository), + ('PagesRepo', self.page_repository)] for name, repo in repos: self._stats.counters['%s_hit' % name] = repo._hits self._stats.counters['%s_miss' % name] = repo._misses - self._stats.manifests['%s_missedKeys' % name] = list(repo._missed_keys) - return self._stats + self._stats.manifests['%s_missedKeys' % name] = \ + list(repo._missed_keys) class StandardEnvironment(Environment): diff -r 
7a1903ede496 -r 2e5c5d33d62c piecrust/events.py --- a/piecrust/events.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/events.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,5 +1,7 @@ class Event(object): + """ A simple implementation of a subscribable event. + """ def __init__(self): self._handlers = [] diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/fastpickle.py --- a/piecrust/fastpickle.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/fastpickle.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,54 +1,85 @@ +import io import sys -import json import codecs import datetime import collections -def pickle(obj): - data = _pickle_object(obj) - data = json.dumps(data, indent=None, separators=(',', ':')) - return data.encode('utf8') +use_msgpack = False +use_marshall = False -def pickle_obj(obj): - if obj is not None: - return _pickle_object(obj) - return None +if use_msgpack: + import msgpack + + def _dumps_msgpack(obj, buf): + msgpack.pack(obj, buf) + + def _loads_msgpack(buf, bufsize): + return msgpack.unpack(buf) + + _dumps = _dumps_msgpack + _loads = _loads_msgpack + +elif use_marshall: + import marshal + + def _dumps_marshal(obj, buf): + marshal.dump(obj, buf) + + def _loads_marshal(buf, bufsize): + return marshal.load(buf) + + _dumps = _dumps_marshal + _loads = _loads_marshal + +else: + import json + + class _BufferWrapper: + def __init__(self, buf): + self._buf = buf + + def write(self, data): + self._buf.write(data.encode('utf8')) + + def read(self): + return self._buf.read().decode('utf8') + + def _dumps_json(obj, buf): + buf = _BufferWrapper(buf) + json.dump(obj, buf, indent=None, separators=(',', ':')) + + def _loads_json(buf, bufsize): + buf = _BufferWrapper(buf) + return json.load(buf) + + _dumps = _dumps_json + _loads = _loads_json + + +def pickle(obj): + with io.BytesIO() as buf: + pickle_intob(obj, buf) + return buf.getvalue() def pickle_intob(obj, buf): data = _pickle_object(obj) - buf = _WriteWrapper(buf) - json.dump(data, buf, indent=None, separators=(',', ':')) + 
_dumps(data, buf) def unpickle(data): - data = json.loads(data.decode('utf8')) + with io.BytesIO(data) as buf: + data = _loads(buf, len(data)) return _unpickle_object(data) -def unpickle_obj(data): - if data is not None: - return _unpickle_object(data) - return None - - def unpickle_fromb(buf, bufsize): - with buf.getbuffer() as innerbuf: - data = codecs.decode(innerbuf[:bufsize], 'utf8') - data = json.loads(data) + data = _loads(buf, bufsize) return _unpickle_object(data) -class _WriteWrapper(object): - def __init__(self, buf): - self._buf = buf - - def write(self, data): - self._buf.write(data.encode('utf8')) - - _PICKLING = 0 _UNPICKLING = 1 @@ -102,7 +133,7 @@ 'day': obj.day} elif op == _UNPICKLING: return datetime.date( - obj['year'], obj['month'], obj['day']) + obj['year'], obj['month'], obj['day']) def _datetime_convert(obj, func, op): @@ -117,8 +148,8 @@ 'microsecond': obj.microsecond} elif op == _UNPICKLING: return datetime.datetime( - obj['year'], obj['month'], obj['day'], - obj['hour'], obj['minute'], obj['second'], obj['microsecond']) + obj['year'], obj['month'], obj['day'], + obj['hour'], obj['minute'], obj['second'], obj['microsecond']) def _time_convert(obj, func, op): @@ -130,47 +161,47 @@ 'microsecond': obj.microsecond} elif op == _UNPICKLING: return datetime.time( - obj['hour'], obj['minute'], obj['second'], obj['microsecond']) + obj['hour'], obj['minute'], obj['second'], obj['microsecond']) _type_convert = { - type(None): _identity_dispatch, - bool: _identity_dispatch, - int: _identity_dispatch, - float: _identity_dispatch, - str: _identity_dispatch, - datetime.date: _date_convert, - datetime.datetime: _datetime_convert, - datetime.time: _time_convert, - tuple: _tuple_convert, - list: _list_convert, - dict: _dict_convert, - set: _set_convert, - collections.OrderedDict: _ordered_dict_convert, - } + type(None): _identity_dispatch, + bool: _identity_dispatch, + int: _identity_dispatch, + float: _identity_dispatch, + str: _identity_dispatch, + 
datetime.date: _date_convert, + datetime.datetime: _datetime_convert, + datetime.time: _time_convert, + tuple: _tuple_convert, + list: _list_convert, + dict: _dict_convert, + set: _set_convert, + collections.OrderedDict: _ordered_dict_convert, +} _type_unconvert = { - type(None): _identity_dispatch, - bool: _identity_dispatch, - int: _identity_dispatch, - float: _identity_dispatch, - str: _identity_dispatch, - 'date': _date_convert, - 'datetime': _datetime_convert, - 'time': _time_convert, - } + type(None): _identity_dispatch, + bool: _identity_dispatch, + int: _identity_dispatch, + float: _identity_dispatch, + str: _identity_dispatch, + 'date': _date_convert, + 'datetime': _datetime_convert, + 'time': _time_convert, +} _collection_unconvert = { - '__type__:tuple': _tuple_convert, - '__type__:set': _set_convert, - } + '__type__:tuple': _tuple_convert, + '__type__:set': _set_convert, +} _mapping_unconvert = { - 'OrderedDict': _ordered_dict_convert - } + 'OrderedDict': _ordered_dict_convert +} def _pickle_object(obj): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/formatting/hoedownformatter.py --- a/piecrust/formatting/hoedownformatter.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/formatting/hoedownformatter.py Tue Nov 21 22:07:12 2017 -0800 @@ -6,7 +6,7 @@ class HoedownFormatter(Formatter): - FORMAT_NAMES = ['hoedown'] + FORMAT_NAMES = ['markdown', 'mdown', 'md'] OUTPUT_FORMAT = 'html' def __init__(self): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/formatting/markdownformatter.py --- a/piecrust/formatting/markdownformatter.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/formatting/markdownformatter.py Tue Nov 21 22:07:12 2017 -0800 @@ -2,7 +2,7 @@ class MarkdownFormatter(Formatter): - FORMAT_NAMES = ['markdown', 'mdown', 'md'] + FORMAT_NAMES = ['pymarkdown'] OUTPUT_FORMAT = 'html' def __init__(self): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/generation/__init__.py diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/generation/base.py --- 
a/piecrust/generation/base.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,163 +0,0 @@ -import logging -from werkzeug.utils import cached_property -from piecrust.baking.records import BakeRecordEntry -from piecrust.baking.worker import save_factory, JOB_BAKE -from piecrust.configuration import ConfigurationError -from piecrust.routing import create_route_metadata -from piecrust.sources.pageref import PageRef - - -logger = logging.getLogger(__name__) - - -class InvalidRecordExtraKey(Exception): - pass - - -class PageGeneratorBakeContext(object): - def __init__(self, app, record, pool, generator): - self._app = app - self._record = record - self._pool = pool - self._generator = generator - self._job_queue = [] - self._is_running = False - - def getRecordExtraKey(self, seed): - return '%s:%s' % (self._generator.name, seed) - - def matchesRecordExtraKey(self, extra_key): - return (extra_key is not None and - extra_key.startswith(self._generator.name + ':')) - - def getSeedFromRecordExtraKey(self, extra_key): - if not self.matchesRecordExtraKey(extra_key): - raise InvalidRecordExtraKey("Invalid extra key: %s" % extra_key) - return extra_key[len(self._generator.name) + 1:] - - def getAllPageRecords(self): - return self._record.transitions.values() - - def getBakedPageRecords(self): - for prev, cur in self.getAllPageRecords(): - if cur and cur.was_any_sub_baked: - yield (prev, cur) - - def collapseRecord(self, entry): - self._record.collapseEntry(entry) - - def queueBakeJob(self, page_fac, route, extra_route_metadata, seed): - if self._is_running: - raise Exception("The job queue is running.") - - extra_key = self.getRecordExtraKey(seed) - entry = BakeRecordEntry( - page_fac.source.name, - page_fac.path, - extra_key) - self._record.addEntry(entry) - - page = page_fac.buildPage() - route_metadata = create_route_metadata(page) - route_metadata.update(extra_route_metadata) - uri = route.getUri(route_metadata) - override_entry = 
self._record.getOverrideEntry(page.path, uri) - if override_entry is not None: - override_source = self.app.getSource( - override_entry.source_name) - if override_source.realm == page_fac.source.realm: - entry.errors.append( - "Page '%s' maps to URL '%s' but is overriden " - "by page '%s'." % - (page_fac.ref_spec, uri, override_entry.path)) - logger.error(entry.errors[-1]) - entry.flags |= BakeRecordEntry.FLAG_OVERRIDEN - return - - route_index = self._app.routes.index(route) - job = { - 'type': JOB_BAKE, - 'job': { - 'factory_info': save_factory(page_fac), - 'generator_name': self._generator.name, - 'generator_record_key': extra_key, - 'route_index': route_index, - 'route_metadata': route_metadata, - 'dirty_source_names': self._record.dirty_source_names, - 'needs_config': True - } - } - self._job_queue.append(job) - - def runJobQueue(self): - def _handler(res): - entry = self._record.getCurrentEntry( - res['path'], res['generator_record_key']) - entry.config = res['config'] - entry.subs = res['sub_entries'] - if res['errors']: - entry.errors += res['errors'] - if entry.has_any_error: - self._record.current.success = False - - self._is_running = True - try: - ar = self._pool.queueJobs(self._job_queue, handler=_handler) - ar.wait() - finally: - self._is_running = False - - -class PageGenerator(object): - def __init__(self, app, name, config): - self.app = app - self.name = name - self.config = config or {} - - self.source_name = config.get('source') - if self.source_name is None: - raise ConfigurationError( - "Generator '%s' requires a source name" % name) - - page_ref = config.get('page') - if page_ref is None: - raise ConfigurationError( - "Generator '%s' requires a listing page ref." 
% name) - self.page_ref = PageRef(app, page_ref) - - self.data_endpoint = config.get('data_endpoint') - self.data_type = config.get('data_type') - if self.data_endpoint and not self.data_type: - raise ConfigurationError( - "Generator '%s' requires a data type because it has " - "a data endpoint." % name) - - self._provider_type = None - - @cached_property - def source(self): - for src in self.app.sources: - if src.name == self.source_name: - return src - raise Exception("Can't find source '%s' for generator '%s'." % ( - self.source_name, self.name)) - - def getSupportedRouteParameters(self): - raise NotImplementedError() - - def getPageFactory(self, route_metadata): - # This will raise `PageNotFoundError` naturally if not found. - return self.page_ref.getFactory() - - def bake(self, ctx): - raise NotImplementedError() - - def onRouteFunctionUsed(self, route, route_metadata): - pass - - def buildDataProvider(self, page, override): - if not self._provider_type: - from piecrust.data.provider import get_data_provider_class - self._provider_type = get_data_provider_class(self.app, - self.data_type) - return self._provider_type(self, page, override) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/generation/blogarchives.py --- a/piecrust/generation/blogarchives.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,123 +0,0 @@ -import logging -import datetime -from piecrust.chefutil import format_timed_scope -from piecrust.data.filters import PaginationFilter, IFilterClause -from piecrust.data.iterators import PageIterator -from piecrust.generation.base import PageGenerator, InvalidRecordExtraKey -from piecrust.routing import RouteParameter - - -logger = logging.getLogger(__name__) - - -class BlogArchivesPageGenerator(PageGenerator): - GENERATOR_NAME = 'blog_archives' - - def __init__(self, app, name, config): - super(BlogArchivesPageGenerator, self).__init__(app, name, config) - - def getSupportedRouteParameters(self): - return 
[RouteParameter('year', RouteParameter.TYPE_INT4)] - - def onRouteFunctionUsed(self, route, route_metadata): - pass - - def prepareRenderContext(self, ctx): - ctx.pagination_source = self.source - - year = ctx.page.route_metadata.get('year') - if year is None: - raise Exception( - "Can't find the archive year in the route metadata") - if type(year) is not int: - raise Exception( - "The route for generator '%s' should specify an integer " - "parameter for 'year'." % self.name) - - flt = PaginationFilter() - flt.addClause(IsFromYearFilterClause(year)) - ctx.pagination_filter = flt - - ctx.custom_data['year'] = year - - flt2 = PaginationFilter() - flt2.addClause(IsFromYearFilterClause(year)) - it = PageIterator(self.source, pagination_filter=flt2, - sorter=_date_sorter) - ctx.custom_data['archives'] = it - - def bake(self, ctx): - if not self.page_ref.exists: - logger.debug( - "No page found at '%s', skipping %s archives." % - (self.page_ref, self.source_name)) - return - - logger.debug("Baking %s archives...", self.source_name) - with format_timed_scope(logger, 'gathered archive years', - level=logging.DEBUG, colored=False): - all_years, dirty_years = self._buildDirtyYears(ctx) - - with format_timed_scope(logger, "baked %d %s archives." 
% - (len(dirty_years), self.source_name)): - self._bakeDirtyYears(ctx, all_years, dirty_years) - - def _buildDirtyYears(self, ctx): - logger.debug("Gathering dirty post years.") - all_years = set() - dirty_years = set() - for _, cur_entry in ctx.getAllPageRecords(): - if cur_entry and cur_entry.source_name == self.source_name: - dt = datetime.datetime.fromtimestamp(cur_entry.timestamp) - all_years.add(dt.year) - if cur_entry.was_any_sub_baked: - dirty_years.add(dt.year) - return all_years, dirty_years - - def _bakeDirtyYears(self, ctx, all_years, dirty_years): - route = self.app.getGeneratorRoute(self.name) - if route is None: - raise Exception( - "No routes have been defined for generator: %s" % - self.name) - - logger.debug("Using archive page: %s" % self.page_ref) - fac = self.page_ref.getFactory() - - for y in dirty_years: - extra_route_metadata = {'year': y} - - logger.debug("Queuing: %s [%s]" % (fac.ref_spec, y)) - ctx.queueBakeJob(fac, route, extra_route_metadata, str(y)) - ctx.runJobQueue() - - # Create bake entries for the years that were *not* dirty. - # Otherwise, when checking for deleted pages, we would not find any - # outputs and would delete those files. - all_str_years = [str(y) for y in all_years] - for prev_entry, cur_entry in ctx.getAllPageRecords(): - if prev_entry and not cur_entry: - try: - y = ctx.getSeedFromRecordExtraKey(prev_entry.extra_key) - except InvalidRecordExtraKey: - continue - if y in all_str_years: - logger.debug( - "Creating unbaked entry for year %s archive." % y) - ctx.collapseRecord(prev_entry) - else: - logger.debug( - "No page references year %s anymore." 
% y) - - -class IsFromYearFilterClause(IFilterClause): - def __init__(self, year): - self.year = year - - def pageMatches(self, fil, page): - return (page.datetime.year == self.year) - - -def _date_sorter(it): - return sorted(it, key=lambda x: x.datetime) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/generation/taxonomy.py --- a/piecrust/generation/taxonomy.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,427 +0,0 @@ -import re -import time -import logging -import unidecode -from piecrust.chefutil import format_timed, format_timed_scope -from piecrust.configuration import ConfigurationError -from piecrust.data.filters import ( - PaginationFilter, SettingFilterClause, - page_value_accessor) -from piecrust.generation.base import PageGenerator, InvalidRecordExtraKey -from piecrust.routing import RouteParameter - - -logger = logging.getLogger(__name__) - - -SLUGIFY_ENCODE = 1 -SLUGIFY_TRANSLITERATE = 2 -SLUGIFY_LOWERCASE = 4 -SLUGIFY_DOT_TO_DASH = 8 -SLUGIFY_SPACE_TO_DASH = 16 - - -re_first_dot_to_dash = re.compile(r'^\.+') -re_dot_to_dash = re.compile(r'\.+') -re_space_to_dash = re.compile(r'\s+') - - -class Taxonomy(object): - def __init__(self, name, config): - self.name = name - self.config = config - self.term_name = config.get('term', name) - self.is_multiple = bool(config.get('multiple', False)) - self.separator = config.get('separator', '/') - self.page_ref = config.get('page') - - @property - def setting_name(self): - if self.is_multiple: - return self.name - return self.term_name - - -class TaxonomyPageGenerator(PageGenerator): - """ A page generator that handles taxonomies, _i.e._ lists of keywords - that pages are labelled with, and for which we need to generate - listing pages. 
- """ - GENERATOR_NAME = 'taxonomy' - - def __init__(self, app, name, config): - super(TaxonomyPageGenerator, self).__init__(app, name, config) - - tax_name = config.get('taxonomy') - if tax_name is None: - raise ConfigurationError( - "Generator '%s' requires a taxonomy name." % name) - tax_config = app.config.get('site/taxonomies/' + tax_name) - if tax_config is None: - raise ConfigurationError( - "Error initializing generator '%s', no such taxonomy: %s", - (name, tax_name)) - self.taxonomy = Taxonomy(tax_name, tax_config) - - sm = config.get('slugify_mode') - if not sm: - sm = app.config.get('site/slugify_mode', 'encode') - self.slugify_mode = _parse_slugify_mode(sm) - self.slugifier = _Slugifier(self.taxonomy, self.slugify_mode) - - def getSupportedRouteParameters(self): - name = self.taxonomy.term_name - param_type = (RouteParameter.TYPE_PATH if self.taxonomy.is_multiple - else RouteParameter.TYPE_STRING) - return [RouteParameter(name, param_type, - variadic=self.taxonomy.is_multiple)] - - def slugify(self, term): - return self.slugifier.slugify(term) - - def slugifyMultiple(self, terms): - return self.slugifier.slugifyMultiple(terms) - - def prepareRenderContext(self, ctx): - # Set the pagination source as the source we're generating for. - ctx.pagination_source = self.source - - # Get the taxonomy terms from the route metadata... this can come from - # the browser's URL (while serving) or from the baking (see `bake` - # method below). In both cases, we expect to have the *slugified* - # version of the term, because we're going to set a filter that also - # slugifies the terms found on each page. - # - # This is because: - # * while serving, we get everything from the request URL, so we only - # have the slugified version. - # * if 2 slightly different terms "collide" into the same slugified - # term, we'll get a merge of the 2 on the listing page, which is - # what the user expects. 
- # - tax_terms, is_combination = self._getTaxonomyTerms( - ctx.page.route_metadata) - self._setTaxonomyFilter(ctx, tax_terms, is_combination) - - # Add some custom data for rendering. - ctx.custom_data.update({ - self.taxonomy.term_name: tax_terms, - 'is_multiple_%s' % self.taxonomy.term_name: is_combination}) - # Add some "plural" version of the term... so for instance, if this - # is the "tags" taxonomy, "tag" will have one term most of the time, - # except when it's a combination. Here, we add "tags" as something that - # is always a tuple, even when it's not a combination. - if (self.taxonomy.is_multiple and - self.taxonomy.name != self.taxonomy.term_name): - mult_val = tax_terms - if not is_combination: - mult_val = (mult_val,) - ctx.custom_data[self.taxonomy.name] = mult_val - - def _getTaxonomyTerms(self, route_metadata): - # Get the individual slugified terms from the route metadata. - all_values = route_metadata.get(self.taxonomy.term_name) - if all_values is None: - raise Exception("'%s' values couldn't be found in route metadata" % - self.taxonomy.term_name) - - # If it's a "multiple" taxonomy, we need to potentially split the - # route value into the individual terms (_e.g._ when listing all pages - # that have 2 given tags, we need to get each of those 2 tags). - if self.taxonomy.is_multiple: - sep = self.taxonomy.separator - if sep in all_values: - return tuple(all_values.split(sep)), True - # Not a "multiple" taxonomy, so there's only the one value. - return all_values, False - - def _setTaxonomyFilter(self, ctx, term_value, is_combination): - # Set up the filter that will check the pages' terms. - flt = PaginationFilter(value_accessor=page_value_accessor) - flt.addClause(HasTaxonomyTermsFilterClause( - self.taxonomy, self.slugify_mode, term_value, is_combination)) - ctx.pagination_filter = flt - - def onRouteFunctionUsed(self, route, route_metadata): - # Get the values, and slugify them appropriately. 
- values = route_metadata[self.taxonomy.term_name] - if self.taxonomy.is_multiple: - # TODO: here we assume the route has been properly configured. - slugified_values = self.slugifyMultiple((str(v) for v in values)) - route_val = self.taxonomy.separator.join(slugified_values) - else: - slugified_values = self.slugify(str(values)) - route_val = slugified_values - - # We need to register this use of a taxonomy term. - eis = self.app.env.exec_info_stack - cpi = eis.current_page_info.render_ctx.current_pass_info - if cpi: - utt = cpi.getCustomInfo('used_taxonomy_terms', [], True) - utt.append(slugified_values) - - # Put the slugified values in the route metadata so they're used to - # generate the URL. - route_metadata[self.taxonomy.term_name] = route_val - - def bake(self, ctx): - if not self.page_ref.exists: - logger.debug( - "No page found at '%s', skipping taxonomy '%s'." % - (self.page_ref, self.taxonomy.name)) - return - - logger.debug("Baking %s pages...", self.taxonomy.name) - analyzer = _TaxonomyTermsAnalyzer(self.source_name, self.taxonomy, - self.slugify_mode) - with format_timed_scope(logger, 'gathered taxonomy terms', - level=logging.DEBUG, colored=False): - analyzer.analyze(ctx) - - start_time = time.perf_counter() - page_count = self._bakeTaxonomyTerms(ctx, analyzer) - if page_count > 0: - logger.info(format_timed( - start_time, - "baked %d %s pages for %s." % ( - page_count, self.taxonomy.term_name, self.source_name))) - - def _bakeTaxonomyTerms(self, ctx, analyzer): - # Start baking those terms. 
- logger.debug( - "Baking '%s' for source '%s': %d terms" % - (self.taxonomy.name, self.source_name, - len(analyzer.dirty_slugified_terms))) - - route = self.app.getGeneratorRoute(self.name) - if route is None: - raise Exception("No routes have been defined for generator: %s" % - self.name) - - logger.debug("Using taxonomy page: %s" % self.page_ref) - fac = self.page_ref.getFactory() - - job_count = 0 - for slugified_term in analyzer.dirty_slugified_terms: - extra_route_metadata = { - self.taxonomy.term_name: slugified_term} - - # Use the slugified term as the record's extra key seed. - logger.debug( - "Queuing: %s [%s=%s]" % - (fac.ref_spec, self.taxonomy.name, slugified_term)) - ctx.queueBakeJob(fac, route, extra_route_metadata, slugified_term) - job_count += 1 - ctx.runJobQueue() - - # Now we create bake entries for all the terms that were *not* dirty. - # This is because otherwise, on the next incremental bake, we wouldn't - # find any entry for those things, and figure that we need to delete - # their outputs. - for prev_entry, cur_entry in ctx.getAllPageRecords(): - # Only consider taxonomy-related entries that don't have any - # current version (i.e. they weren't baked just now). - if prev_entry and not cur_entry: - try: - t = ctx.getSeedFromRecordExtraKey(prev_entry.extra_key) - except InvalidRecordExtraKey: - continue - - if analyzer.isKnownSlugifiedTerm(t): - logger.debug("Creating unbaked entry for %s term: %s" % - (self.name, t)) - ctx.collapseRecord(prev_entry) - else: - logger.debug("Term %s in %s isn't used anymore." 
% - (self.name, t)) - - return job_count - - -class HasTaxonomyTermsFilterClause(SettingFilterClause): - def __init__(self, taxonomy, slugify_mode, value, is_combination): - super(HasTaxonomyTermsFilterClause, self).__init__( - taxonomy.setting_name, value) - self._taxonomy = taxonomy - self._is_combination = is_combination - self._slugifier = _Slugifier(taxonomy, slugify_mode) - - def pageMatches(self, fil, page): - if self._taxonomy.is_multiple: - # Multiple taxonomy, i.e. it supports multiple terms, like tags. - page_values = fil.value_accessor(page, self.name) - if page_values is None or not isinstance(page_values, list): - return False - - page_set = set(map(self._slugifier.slugify, page_values)) - if self._is_combination: - # Multiple taxonomy, and multiple terms to match. Check that - # the ones to match are all in the page's terms. - value_set = set(self.value) - return value_set.issubset(page_set) - else: - # Multiple taxonomy, one term to match. - return self.value in page_set - else: - # Single taxonomy. Just compare the values. - page_value = fil.value_accessor(page, self.name) - if page_value is None: - return False - page_value = self._slugifier.slugify(page_value) - return page_value == self.value - - -class _TaxonomyTermsAnalyzer(object): - def __init__(self, source_name, taxonomy, slugify_mode): - self.source_name = source_name - self.taxonomy = taxonomy - self.slugifier = _Slugifier(taxonomy, slugify_mode) - self._all_terms = {} - self._single_dirty_slugified_terms = set() - self._all_dirty_slugified_terms = None - - @property - def dirty_slugified_terms(self): - """ Returns the slugified terms that have been 'dirtied' during - this bake. - """ - return self._all_dirty_slugified_terms - - def isKnownSlugifiedTerm(self, term): - """ Returns whether the given slugified term has been seen during - this bake. 
- """ - return term in self._all_terms - - def analyze(self, ctx): - # Build the list of terms for our taxonomy, and figure out which ones - # are 'dirty' for the current bake. - # - # Remember all terms used. - for _, cur_entry in ctx.getAllPageRecords(): - if cur_entry and not cur_entry.was_overriden: - cur_terms = cur_entry.config.get(self.taxonomy.setting_name) - if cur_terms: - if not self.taxonomy.is_multiple: - self._addTerm(cur_entry.path, cur_terms) - else: - self._addTerms(cur_entry.path, cur_terms) - - # Re-bake all taxonomy terms that include new or changed pages, by - # marking them as 'dirty'. - for prev_entry, cur_entry in ctx.getBakedPageRecords(): - if cur_entry.source_name != self.source_name: - continue - - entries = [cur_entry] - if prev_entry: - entries.append(prev_entry) - - for e in entries: - entry_terms = e.config.get(self.taxonomy.setting_name) - if entry_terms: - if not self.taxonomy.is_multiple: - self._single_dirty_slugified_terms.add( - self.slugifier.slugify(entry_terms)) - else: - self._single_dirty_slugified_terms.update( - (self.slugifier.slugify(t) - for t in entry_terms)) - - self._all_dirty_slugified_terms = list( - self._single_dirty_slugified_terms) - logger.debug("Gathered %d dirty taxonomy terms", - len(self._all_dirty_slugified_terms)) - - # Re-bake the combination pages for terms that are 'dirty'. - # We make all terms into tuple, even those that are not actual - # combinations, so that we have less things to test further down the - # line. - # - # Add the combinations to that list. We get those combinations from - # wherever combinations were used, so they're coming from the - # `onRouteFunctionUsed` method. 
- if self.taxonomy.is_multiple: - known_combinations = set() - for _, cur_entry in ctx.getAllPageRecords(): - if cur_entry: - used_terms = _get_all_entry_taxonomy_terms(cur_entry) - for terms in used_terms: - if len(terms) > 1: - known_combinations.add(terms) - - dcc = 0 - for terms in known_combinations: - if not self._single_dirty_slugified_terms.isdisjoint( - set(terms)): - self._all_dirty_slugified_terms.append( - self.taxonomy.separator.join(terms)) - dcc += 1 - logger.debug("Gathered %d term combinations, with %d dirty." % - (len(known_combinations), dcc)) - - def _addTerms(self, entry_path, terms): - for t in terms: - self._addTerm(entry_path, t) - - def _addTerm(self, entry_path, term): - st = self.slugifier.slugify(term) - orig_terms = self._all_terms.setdefault(st, []) - if orig_terms and orig_terms[0] != term: - logger.warning( - "Term '%s' in '%s' is slugified to '%s' which conflicts with " - "previously existing '%s'. The two will be merged." % - (term, entry_path, st, orig_terms[0])) - orig_terms.append(term) - - -def _get_all_entry_taxonomy_terms(entry): - res = set() - for o in entry.subs: - for pinfo in o.render_info: - if pinfo: - terms = pinfo.getCustomInfo('used_taxonomy_terms') - if terms: - res |= set(terms) - return res - - -class _Slugifier(object): - def __init__(self, taxonomy, mode): - self.taxonomy = taxonomy - self.mode = mode - - def slugifyMultiple(self, terms): - return tuple(map(self.slugify, terms)) - - def slugify(self, term): - if self.mode & SLUGIFY_TRANSLITERATE: - term = unidecode.unidecode(term) - if self.mode & SLUGIFY_LOWERCASE: - term = term.lower() - if self.mode & SLUGIFY_DOT_TO_DASH: - term = re_first_dot_to_dash.sub('', term) - term = re_dot_to_dash.sub('-', term) - if self.mode & SLUGIFY_SPACE_TO_DASH: - term = re_space_to_dash.sub('-', term) - return term - - -def _parse_slugify_mode(value): - mapping = { - 'encode': SLUGIFY_ENCODE, - 'transliterate': SLUGIFY_TRANSLITERATE, - 'lowercase': SLUGIFY_LOWERCASE, - 
'dot_to_dash': SLUGIFY_DOT_TO_DASH, - 'space_to_dash': SLUGIFY_SPACE_TO_DASH} - mode = 0 - for v in value.split(','): - f = mapping.get(v.strip()) - if f is None: - if v == 'iconv': - raise Exception("'iconv' is not supported as a slugify mode " - "in PieCrust2. Use 'transliterate'.") - raise Exception("Unknown slugify flag: %s" % v) - mode |= f - return mode - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/importing/wordpress.py --- a/piecrust/importing/wordpress.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/importing/wordpress.py Tue Nov 21 22:07:12 2017 -0800 @@ -5,9 +5,8 @@ from collections import OrderedDict from piecrust import CONFIG_PATH from piecrust.configuration import ( - ConfigurationLoader, ConfigurationDumper, merge_dicts) + ConfigurationLoader, ConfigurationDumper, merge_dicts) from piecrust.importing.base import Importer, create_page, download_asset -from piecrust.sources.base import MODE_CREATING logger = logging.getLogger(__name__) @@ -16,25 +15,25 @@ class WordpressImporterBase(Importer): def setupParser(self, parser, app): parser.add_argument( - '--pages-source', - default="pages", - help="The source to store pages in.") + '--pages-source', + default="pages", + help="The source to store pages in.") parser.add_argument( - '--posts-source', - default="posts", - help="The source to store posts in.") + '--posts-source', + default="posts", + help="The source to store posts in.") parser.add_argument( - '--default-post-layout', - help="The default layout to use for posts.") + '--default-post-layout', + help="The default layout to use for posts.") parser.add_argument( - '--default-post-category', - help="The default category to use for posts.") + '--default-post-category', + help="The default category to use for posts.") parser.add_argument( - '--default-page-layout', - help="The default layout to use for pages.") + '--default-page-layout', + help="The default layout to use for pages.") parser.add_argument( - '--default-page-category', - help="The 
default category to use for pages.") + '--default-page-category', + help="The default category to use for pages.") def importWebsite(self, app, args): impl = self._getImplementation(app, args) @@ -60,8 +59,8 @@ site_config = self._getSiteConfig(ctx) site_config.setdefault('site', {}) site_config['site'].update({ - 'post_url': '%year%/%month%/%slug%', - 'category_url': 'category/%category%'}) + 'post_url': '%year%/%month%/%slug%', + 'category_url': 'category/%category%'}) site_config_path = os.path.join(self.app.root_dir, CONFIG_PATH) with open(site_config_path, 'r') as fp: @@ -102,10 +101,10 @@ def _createPost(self, post_info): post_dt = post_info['datetime'] finder = { - 'year': post_dt.year, - 'month': post_dt.month, - 'day': post_dt.day, - 'slug': post_info['slug']} + 'year': post_dt.year, + 'month': post_dt.month, + 'day': post_dt.day, + 'slug': post_info['slug']} if post_info['type'] == 'post': source = self._posts_source elif post_info['type'] == 'page': @@ -174,25 +173,25 @@ title = find_text(channel, 'title') description = find_text(channel, 'description') site_config = OrderedDict({ - 'site': { - 'title': title, - 'description': description} - }) + 'site': { + 'title': title, + 'description': description} + }) # Get authors' names. 
authors = {} for a in channel.findall('wp:author', self.ns_wp): login = find_text(a, 'wp:author_login', self.ns_wp) authors[login] = { - 'email': find_text(a, 'wp:author_email', self.ns_wp), - 'display_name': find_text(a, 'wp:author_display_name', - self.ns_wp), - 'first_name': find_text(a, 'wp:author_first_name', - self.ns_wp), - 'last_name': find_text(a, 'wp:author_last_name', - self.ns_wp), - 'author_id': find_text(a, 'wp:author_id', - self.ns_wp)} + 'email': find_text(a, 'wp:author_email', self.ns_wp), + 'display_name': find_text(a, 'wp:author_display_name', + self.ns_wp), + 'first_name': find_text(a, 'wp:author_first_name', + self.ns_wp), + 'last_name': find_text(a, 'wp:author_last_name', + self.ns_wp), + 'author_id': find_text(a, 'wp:author_id', + self.ns_wp)} site_config['site']['authors'] = authors return site_config @@ -216,9 +215,9 @@ post_name = find_text(node, 'wp:post_name', self.ns_wp) post_type = find_text(node, 'wp:post_type', self.ns_wp) post_info = { - 'type': post_type, - 'slug': post_name, - 'datetime': post_date} + 'type': post_type, + 'slug': post_name, + 'datetime': post_date} title = find_text(node, 'title') creator = find_text(node, 'dc:creator', self.ns_dc) @@ -228,12 +227,12 @@ description = find_text(node, 'description') # TODO: menu order, parent, password, sticky post_info.update({ - 'title': title, - 'author': creator, - 'status': status, - 'post_id': post_id, - 'post_guid': guid, - 'description': description}) + 'title': title, + 'author': creator, + 'status': status, + 'post_id': post_id, + 'post_guid': guid, + 'description': description}) categories = [] for c in node.findall('category'): @@ -250,8 +249,8 @@ content = find_text(node, 'content:encoded', self.ns_content) excerpt = find_text(node, 'excerpt:encoded', self.ns_excerpt) post_info.update({ - 'content': content, - 'excerpt': excerpt}) + 'content': content, + 'excerpt': excerpt}) return post_info diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/main.py --- a/piecrust/main.py 
Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/main.py Tue Nov 21 22:07:12 2017 -0800 @@ -9,9 +9,9 @@ import colorama from piecrust import APP_VERSION from piecrust.app import ( - PieCrust, PieCrustConfiguration, apply_variant_and_values) + PieCrustFactory, PieCrustConfiguration) from piecrust.chefutil import ( - format_timed, log_friendly_exception, print_help_item) + format_timed, log_friendly_exception, print_help_item) from piecrust.commands.base import CommandContext from piecrust.pathutil import SiteNotFoundError, find_app_root from piecrust.plugins.base import PluginLoader @@ -19,15 +19,17 @@ logger = logging.getLogger(__name__) +_chef_start_time = time.perf_counter() + class ColoredFormatter(logging.Formatter): COLORS = { - 'DEBUG': colorama.Fore.BLACK + colorama.Style.BRIGHT, - 'INFO': '', - 'WARNING': colorama.Fore.YELLOW, - 'ERROR': colorama.Fore.RED, - 'CRITICAL': colorama.Back.RED + colorama.Fore.WHITE - } + 'DEBUG': colorama.Fore.BLACK + colorama.Style.BRIGHT, + 'INFO': '', + 'WARNING': colorama.Fore.YELLOW, + 'ERROR': colorama.Fore.RED, + 'CRITICAL': colorama.Back.RED + colorama.Fore.WHITE + } def __init__(self, fmt=None, datefmt=None): super(ColoredFormatter, self).__init__(fmt, datefmt) @@ -79,70 +81,80 @@ def _setup_main_parser_arguments(parser): parser.add_argument( - '--version', - action='version', - version=('%(prog)s ' + APP_VERSION)) + '--version', + action='version', + version=('%(prog)s ' + APP_VERSION)) parser.add_argument( - '--root', - help="The root directory of the website.") + '--root', + help="The root directory of the website.") parser.add_argument( - '--theme', - action='store_true', - help="Makes the current command apply to a theme website.") + '--theme', + action='store_true', + help="Makes the current command apply to a theme website.") parser.add_argument( - '--config', - dest='config_variant', - help="The configuration variant to use for this command.") + '--config', + action='append', + dest='config_variants', + help="The 
configuration variant(s) to use for this command.") parser.add_argument( - '--config-set', - nargs=2, - metavar=('NAME', 'VALUE'), - action='append', - dest='config_values', - help="Sets a specific site configuration setting.") + '--config-set', + nargs=2, + metavar=('NAME', 'VALUE'), + action='append', + dest='config_values', + help="Sets a specific site configuration setting.") parser.add_argument( - '--debug', - help="Show debug information.", action='store_true') + '--debug', + help="Show debug information.", action='store_true') parser.add_argument( - '--debug-only', - action='append', - help="Only show debug information for the given categories.") + '--debug-only', + action='append', + help="Only show debug information for the given categories.") parser.add_argument( - '--no-cache', - help="When applicable, disable caching.", - action='store_true') + '--no-cache', + help="When applicable, disable caching.", + action='store_true') parser.add_argument( - '--quiet', - help="Print only important information.", - action='store_true') + '--quiet', + help="Print only important information.", + action='store_true') parser.add_argument( - '--log', - dest='log_file', - help="Send log messages to the specified file.") + '--log', + dest='log_file', + help="Send log messages to the specified file.") parser.add_argument( - '--log-debug', - help="Log debug messages to the log file.", - action='store_true') + '--log-debug', + help="Log debug messages to the log file.", + action='store_true') parser.add_argument( - '--no-color', - help="Don't use colorized output.", - action='store_true') + '--no-color', + help="Don't use colorized output.", + action='store_true') parser.add_argument( - '--pid-file', - dest='pid_file', - help="Write a PID file for the current process.") + '--pid-file', + dest='pid_file', + help="Write a PID file for the current process.") """ Kinda hacky, but we want the `serve` command to use a different cache - so that PieCrust doesn't need to re-render all 
the pages when going - between `serve` and `bake` (or, worse, *not* re-render them all correctly - and end up serving or baking the wrong version). +so that PieCrust doesn't need to re-render all the pages when going +between `serve` and `bake` (or, worse, *not* re-render them all correctly +and end up serving or baking the wrong version). """ _command_caches = { - 'serve': 'server'} + 'serve': 'server'} + + +def _make_chef_state(): + return [] -def _pre_parse_chef_args(argv): +def _recover_pre_chef_state(state): + for s in state: + s() + + +def _pre_parse_chef_args(argv, *, bypass_setup=False, state=None): # We need to parse some arguments before we can build the actual argument # parser, because it can affect which plugins will be loaded. Also, log- # related arguments must be parsed first because we want to log everything @@ -151,6 +163,8 @@ _setup_main_parser_arguments(parser) parser.add_argument('extra_args', nargs=argparse.REMAINDER) res, _ = parser.parse_known_args(argv) + if bypass_setup: + return res # Setup the logger. 
if res.debug and res.quiet: @@ -162,13 +176,20 @@ colorama.init(strip=strip_colors) root_logger = logging.getLogger() + previous_level = root_logger.level root_logger.setLevel(logging.INFO) if res.debug or res.log_debug: root_logger.setLevel(logging.DEBUG) + if state is not None: + state.append(lambda: root_logger.setLevel(previous_level)) if res.debug_only: for n in res.debug_only: - logging.getLogger(n).setLevel(logging.DEBUG) + sub_logger = logging.getLogger(n) + previous_level = sub_logger.level + sub_logger.setLevel(logging.DEBUG) + if state is not None: + state.append(lambda: sub_logger.setLevel(previous_level)) log_handler = logging.StreamHandler(sys.stdout) if res.debug or res.debug_only: @@ -181,12 +202,16 @@ log_handler.setLevel(logging.INFO) log_handler.setFormatter(ColoredFormatter("%(message)s")) root_logger.addHandler(log_handler) + if state is not None: + state.append(lambda: root_logger.removeHandler(log_handler)) if res.log_file: file_handler = logging.FileHandler(res.log_file, mode='w') root_logger.addHandler(file_handler) if res.log_debug: file_handler.setLevel(logging.DEBUG) + if state is not None: + state.append(lambda: root_logger.removeHandler(file_handler)) # PID file. if res.pid_file: @@ -209,8 +234,9 @@ cmd_name = pre_args.extra_args[0] if cmd_name in _command_caches: cache_key_str = _command_caches[cmd_name] - if pre_args.config_variant is not None: - cache_key_str += ',variant=%s' % pre_args.config_variant + if pre_args.config_variants: + for value in pre_args.config_variants: + cache_key_str += ',variant=%s' % value if pre_args.config_values: for name, value in pre_args.config_values: cache_key_str += ',%s=%s' % (name, value) @@ -222,7 +248,6 @@ def _run_chef(pre_args, argv): # Setup the app. - start_time = time.perf_counter() root = None if pre_args.root: root = os.path.expanduser(pre_args.root) @@ -233,32 +258,34 @@ root = None # Can't apply custom configuration stuff if there's no website. 
- if (pre_args.config_variant or pre_args.config_values) and not root: + if (pre_args.config_variants or pre_args.config_values) and not root: raise SiteNotFoundError( - "Can't apply any configuration variant or value overrides, " - "there is no website here.") + "Can't apply any configuration variant or value overrides, " + "there is no website here.") if root: cache_key = None if not pre_args.no_cache: cache_key = _build_cache_key(pre_args) - app = PieCrust( - root, - theme_site=pre_args.theme, - cache=(not pre_args.no_cache), - cache_key=cache_key, - debug=pre_args.debug) - apply_variant_and_values( - app, pre_args.config_variant, pre_args.config_values) + appfactory = PieCrustFactory( + root, + theme_site=pre_args.theme, + cache=(not pre_args.no_cache), + cache_key=cache_key, + debug=pre_args.debug, + config_variants=pre_args.config_variants, + config_values=pre_args.config_values) + app = appfactory.create() else: + appfactory = None app = NullPieCrust( - theme_site=pre_args.theme) + theme_site=pre_args.theme) # Setup the arg parser. parser = argparse.ArgumentParser( - prog='chef', - description="The PieCrust chef manages your website.", - formatter_class=argparse.RawDescriptionHelpFormatter) + prog='chef', + description="The PieCrust chef manages your website.", + formatter_class=argparse.RawDescriptionHelpFormatter) _setup_main_parser_arguments(parser) commands = sorted(app.plugin_loader.getCommands(), @@ -280,7 +307,7 @@ # Parse the command line. result = parser.parse_args(argv) - logger.debug(format_timed(start_time, 'initialized PieCrust', + logger.debug(format_timed(_chef_start_time, 'initialized PieCrust', colored=False)) # Print the help if no command was specified. @@ -288,11 +315,12 @@ parser.print_help() return 0 + # Add some timing information. + app.env.stats.registerTimer('ChefStartup') + app.env.stats.stepTimerSince('ChefStartup', _chef_start_time) + # Run the command! 
- ctx = CommandContext(app, parser, result) - ctx.config_variant = pre_args.config_variant - ctx.config_values = pre_args.config_values - + ctx = CommandContext(appfactory, app, parser, result) exit_code = result.func(ctx) if exit_code is None: return 0 diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/page.py --- a/piecrust/page.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/page.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,16 +1,14 @@ import re -import sys import json -import os.path import hashlib import logging import datetime -import dateutil.parser import collections from werkzeug.utils import cached_property from piecrust.configuration import ( - Configuration, ConfigurationError, - parse_config_header) + Configuration, ConfigurationError, + parse_config_header, + MERGE_PREPEND_LISTS) logger = logging.getLogger(__name__) @@ -21,7 +19,7 @@ super(PageConfiguration, self).__init__(values, validate) def _validateAll(self, values): - values.setdefault('title', 'Untitled Page') + values.setdefault('title', '') values.setdefault('content_type', 'html') ppp = values.get('posts_per_page') if ppp is not None: @@ -36,32 +34,41 @@ FLAG_RAW_CACHE_VALID = 2**0 -class Page(object): - def __init__(self, source, source_metadata, rel_path): +class PageNotFoundError(Exception): + pass + + +class Page: + """ Represents a page that is text content with an optional YAML + front-matter, and that goes through the page pipeline. 
+ """ + def __init__(self, source, content_item): self.source = source - self.source_metadata = source_metadata - self.rel_path = rel_path + self.content_item = content_item self._config = None self._segments = None self._flags = FLAG_NONE self._datetime = None - @property + @cached_property def app(self): return self.source.app + @cached_property + def route(self): + return self.source.route + @property - def ref_spec(self): - return '%s:%s' % (self.source.name, self.rel_path) + def source_metadata(self): + return self.content_item.metadata + + @property + def content_spec(self): + return self.content_item.spec @cached_property - def path(self): - path, _ = self.source.resolveRef(self.rel_path) - return path - - @cached_property - def path_mtime(self): - return os.path.getmtime(self.path) + def content_mtime(self): + return self.source.getItemMtime(self.content_item) @property def flags(self): @@ -81,48 +88,32 @@ def datetime(self): if self._datetime is None: try: - if 'datetime' in self.source_metadata: - # Get the date/time from the source. - self._datetime = self.source_metadata['datetime'] - elif 'date' in self.source_metadata: - # Get the date from the source. Potentially get the - # time from the page config. - page_date = self.source_metadata['date'] - page_time = _parse_config_time(self.config.get('time')) - if page_time is not None: - self._datetime = datetime.datetime( - page_date.year, - page_date.month, - page_date.day) + page_time - else: - self._datetime = datetime.datetime( - page_date.year, page_date.month, page_date.day) - elif 'date' in self.config: - # Get the date from the page config, and maybe the - # time too. - page_date = _parse_config_date(self.config.get('date')) - self._datetime = datetime.datetime( - page_date.year, - page_date.month, - page_date.day) - page_time = _parse_config_time(self.config.get('time')) - if page_time is not None: - self._datetime += page_time - else: - # No idea what the date/time for this page is. 
- self._datetime = datetime.datetime.fromtimestamp( - self.path_mtime) + self._datetime = _compute_datetime(self.source_metadata, + self.config) except Exception as ex: logger.exception(ex) raise Exception( - "Error computing time for page: %s" % - self.path) from ex + "Error computing time for page: %s" % + self.content_spec) from ex + + if self._datetime is None: + self._datetime = datetime.datetime.fromtimestamp( + self.content_mtime) + return self._datetime @datetime.setter def datetime(self, value): self._datetime = value + @property + def was_modified(self): + return (self._flags & FLAG_RAW_CACHE_VALID) == 0 + + def getUri(self, sub_num=1): + route_params = self.source_metadata['route_params'] + return self.route.getUri(route_params, sub_num=sub_num) + def getSegment(self, name='content'): return self.segments[name] @@ -130,32 +121,74 @@ if self._config is not None: return - config, content, was_cache_valid = load_page(self.app, self.path, - self.path_mtime) - if 'config' in self.source_metadata: - config.merge(self.source_metadata['config']) + config, content, was_cache_valid = load_page( + self.source, self.content_item) + + extra_config = self.source_metadata.get('config') + if extra_config is not None: + # Merge the source metadata configuration settings with the + # configuration settings from the page's contents. We only + # prepend to lists, i.e. we don't overwrite values because we + # want to keep what the user wrote in the file. + config.merge(extra_config, mode=MERGE_PREPEND_LISTS) self._config = config self._segments = content if was_cache_valid: self._flags |= FLAG_RAW_CACHE_VALID - self.source.finalizeConfig(self) + +def _compute_datetime(source_metadata, config): + # Get the date/time from the source. + dt = source_metadata.get('datetime') + if dt is not None: + return dt + + # Get the date from the source. Potentially get the + # time from the page config. 
+ page_date = source_metadata.get('date') + if page_date is not None: + dt = datetime.datetime( + page_date.year, page_date.month, page_date.day) + + page_time = _parse_config_time(config.get('time')) + if page_time is not None: + dt += page_time + + return dt + + # Get the date from the page config, and maybe the + # time too. + page_date = _parse_config_date(config.get('date')) + if page_date is not None: + dt = datetime.datetime( + page_date.year, page_date.month, page_date.day) + + page_time = _parse_config_time(config.get('time')) + if page_time is not None: + dt += page_time + + return dt + + # No idea what the date/time for this page is. + return None + def _parse_config_date(page_date): if page_date is None: return None if isinstance(page_date, str): + import dateutil.parser try: parsed_d = dateutil.parser.parse(page_date) except Exception as ex: logger.exception(ex) raise ConfigurationError("Invalid date: %s" % page_date) from ex return datetime.date( - year=parsed_d.year, - month=parsed_d.month, - day=parsed_d.day) + year=parsed_d.year, + month=parsed_d.month, + day=parsed_d.day) raise ConfigurationError("Invalid date: %s" % page_date) @@ -168,15 +201,16 @@ return page_time if isinstance(page_time, str): + import dateutil.parser try: parsed_t = dateutil.parser.parse(page_time) except Exception as ex: logger.exception(ex) raise ConfigurationError("Invalid time: %s" % page_time) from ex return datetime.timedelta( - hours=parsed_t.hour, - minutes=parsed_t.minute, - seconds=parsed_t.second) + hours=parsed_t.hour, + minutes=parsed_t.minute, + seconds=parsed_t.second) if isinstance(page_time, int): # Total seconds... convert to a time struct. 
@@ -186,41 +220,27 @@ class PageLoadingError(Exception): - def __init__(self, path, inner=None): - super(PageLoadingError, self).__init__( - "Error loading page: %s" % path, - inner) + def __init__(self, spec): + super().__init__("Error loading page: %s" % spec) class ContentSegment(object): debug_render_func = 'debug_render' - def __init__(self): - self.parts = [] - - def debug_render(self): - return '\n'.join([p.content for p in self.parts]) - - -class ContentSegmentPart(object): def __init__(self, content, fmt=None, offset=-1, line=-1): self.content = content self.fmt = fmt self.offset = offset self.line = line - def __str__(self): - return '%s [%s]' % (self.content, self.fmt or '') + def debug_render(self): + return '[%s] %s' % (self.fmt or '', self.content) def json_load_segments(data): segments = {} - for key, seg_data in data.items(): - seg = ContentSegment() - for p_data in seg_data: - part = ContentSegmentPart(p_data['c'], p_data['f'], p_data['o'], - p_data['l']) - seg.parts.append(part) + for key, sd in data.items(): + seg = ContentSegment(sd['c'], sd['f'], sd['o'], sd['l']) segments[key] = seg return segments @@ -228,87 +248,89 @@ def json_save_segments(segments): data = {} for key, seg in segments.items(): - seg_data = [] - for part in seg.parts: - p_data = {'c': part.content, 'f': part.fmt, 'o': part.offset, - 'l': part.line} - seg_data.append(p_data) + seg_data = { + 'c': seg.content, 'f': seg.fmt, 'o': seg.offset, 'l': seg.line} data[key] = seg_data return data -def load_page(app, path, path_mtime=None): +def load_page(source, content_item): try: - with app.env.timerScope('PageLoad'): - return _do_load_page(app, path, path_mtime) + with source.app.env.stats.timerScope('PageLoad'): + return _do_load_page(source, content_item) except Exception as e: - logger.exception( - "Error loading page: %s" % - os.path.relpath(path, app.root_dir)) - _, __, traceback = sys.exc_info() - raise PageLoadingError(path, e).with_traceback(traceback) + 
logger.exception("Error loading page: %s" % content_item.spec) + raise PageLoadingError(content_item.spec) from e -def _do_load_page(app, path, path_mtime): +def _do_load_page(source, content_item): # Check the cache first. + app = source.app cache = app.cache.getCache('pages') - cache_path = hashlib.md5(path.encode('utf8')).hexdigest() + '.json' - page_time = path_mtime or os.path.getmtime(path) + cache_token = "%s@%s" % (source.name, content_item.spec) + cache_path = hashlib.md5(cache_token.encode('utf8')).hexdigest() + '.json' + page_time = source.getItemMtime(content_item) if cache.isValid(cache_path, page_time): cache_data = json.loads( - cache.read(cache_path), - object_pairs_hook=collections.OrderedDict) + cache.read(cache_path), + object_pairs_hook=collections.OrderedDict) config = PageConfiguration( - values=cache_data['config'], - validate=False) + values=cache_data['config'], + validate=False) content = json_load_segments(cache_data['content']) return config, content, True # Nope, load the page from the source file. - logger.debug("Loading page configuration from: %s" % path) - with open(path, 'r', encoding='utf-8') as fp: + logger.debug("Loading page configuration from: %s" % content_item.spec) + with source.openItem(content_item, 'r', encoding='utf-8') as fp: raw = fp.read() header, offset = parse_config_header(raw) - if 'format' not in header: - auto_formats = app.config.get('site/auto_formats') - name, ext = os.path.splitext(path) - header['format'] = auto_formats.get(ext, None) - config = PageConfiguration(header) content = parse_segments(raw, offset) config.set('segments', list(content.keys())) # Save to the cache. 
cache_data = { - 'config': config.getAll(), - 'content': json_save_segments(content)} + 'config': config.getAll(), + 'content': json_save_segments(content)} cache.write(cache_path, json.dumps(cache_data)) + app.env.stats.stepCounter('PageLoads') + return config, content, False segment_pattern = re.compile( - r"""^\-\-\-\s*(?P\w+)(\:(?P\w+))?\s*\-\-\-\s*$""", - re.M) -part_pattern = re.compile( - r"""^<\-\-\s*(?P\w+)\s*\-\->\s*$""", - re.M) + r"""^\-\-\-\s*(?P\w+)(\:(?P\w+))?\s*\-\-\-\s*$""", + re.M) -def _count_lines(s): - return len(s.split('\n')) +def _count_lines(txt, start=0, end=-1): + cur = start + line_count = 1 + while True: + nex = txt.find('\n', cur) + if nex < 0 or (end >= 0 and nex >= end): + break + + cur = nex + 1 + line_count += 1 + + if end >= 0 and cur >= end: + break + + return line_count def _string_needs_parsing(txt, offset): txtlen = len(txt) index = txt.find('-', offset) while index >= 0 and index < txtlen - 8: - # Look for a potential `<--format-->` - if index > 0 and txt[index - 1] == '<' and txt[index + 1] == '-': - return True # Look for a potential `---segment---` - if txt[index + 1] == '-' and txt[index + 2] == '-': + if (index > 0 and + txt[index - 1] == '\n' and + txt[index + 1] == '-' and txt[index + 2] == '-'): return True index = txt.find('-', index + 1) return False @@ -316,18 +338,16 @@ def parse_segments(raw, offset=0): # Get the number of lines in the header. - header_lines = _count_lines(raw[:offset].rstrip()) + header_lines = _count_lines(raw, 0, offset) current_line = header_lines # Figure out if we need any parsing. do_parse = _string_needs_parsing(raw, offset) if not do_parse: - seg = ContentSegment() - seg.parts = [ - ContentSegmentPart(raw[offset:], None, offset, current_line)] + seg = ContentSegment(raw[offset:], None, offset, current_line) return {'content': seg} - # Start parsing segments and parts. + # Start parsing segments. 
matches = list(segment_pattern.finditer(raw, offset)) num_matches = len(matches) if num_matches > 0: @@ -336,70 +356,41 @@ first_offset = matches[0].start() if first_offset > 0: # There's some default content segment at the beginning. - seg = ContentSegment() - seg.parts, current_line = parse_segment_parts( - raw, offset, first_offset, current_line) + seg = ContentSegment( + raw[offset:first_offset], None, offset, current_line) + current_line += _count_lines(seg.content) contents['content'] = seg for i in range(1, num_matches): m1 = matches[i - 1] m2 = matches[i] - seg = ContentSegment() - seg.parts, current_line = parse_segment_parts( - raw, m1.end() + 1, m2.start(), current_line, - m1.group('fmt')) + + cur_seg_start = m1.end() + 1 + cur_seg_end = m2.start() + + seg = ContentSegment( + raw[cur_seg_start:cur_seg_end], + m1.group('fmt'), + cur_seg_start, + current_line) + current_line += _count_lines(seg.content) contents[m1.group('name')] = seg # Handle text past the last match. lastm = matches[-1] - seg = ContentSegment() - seg.parts, current_line = parse_segment_parts( - raw, lastm.end() + 1, len(raw), current_line, - lastm.group('fmt')) + + last_seg_start = lastm.end() + 1 + + seg = ContentSegment( + raw[last_seg_start:], + lastm.group('fmt'), + last_seg_start, + current_line) contents[lastm.group('name')] = seg + # No need to count lines for the last one. return contents else: # No segments, just content. - seg = ContentSegment() - seg.parts, current_line = parse_segment_parts( - raw, offset, len(raw), current_line) + seg = ContentSegment(raw[offset:], None, offset, current_line) return {'content': seg} - - -def parse_segment_parts(raw, start, end, line_offset, first_part_fmt=None): - matches = list(part_pattern.finditer(raw, start, end)) - num_matches = len(matches) - if num_matches > 0: - parts = [] - - # First part, before the first format change. 
- part_text = raw[start:matches[0].start()] - parts.append( - ContentSegmentPart(part_text, first_part_fmt, start, - line_offset)) - line_offset += _count_lines(part_text) - - for i in range(1, num_matches): - m1 = matches[i - 1] - m2 = matches[i] - part_text = raw[m1.end() + 1:m2.start()] - parts.append( - ContentSegmentPart( - part_text, m1.group('fmt'), m1.end() + 1, - line_offset)) - line_offset += _count_lines(part_text) - - lastm = matches[-1] - part_text = raw[lastm.end() + 1:end] - parts.append(ContentSegmentPart( - part_text, lastm.group('fmt'), lastm.end() + 1, - line_offset)) - - return parts, line_offset - else: - part_text = raw[start:end] - parts = [ContentSegmentPart(part_text, first_part_fmt, start, - line_offset)] - return parts, line_offset - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/__init__.py diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/_pagebaker.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/_pagebaker.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,266 @@ +import os.path +import copy +import queue +import shutil +import logging +import threading +import urllib.parse +from piecrust.pipelines._pagerecords import ( + SubPageFlags, create_subpage_job_result) +from piecrust.rendering import RenderingContext, render_page +from piecrust.sources.base import AbortedSourceUseError +from piecrust.uriutil import split_uri + + +logger = logging.getLogger(__name__) + + +def get_output_path(app, out_dir, uri, pretty_urls): + uri_root, uri_path = split_uri(app, uri) + + bake_path = [out_dir] + decoded_uri = urllib.parse.unquote(uri_path) + if pretty_urls: + bake_path.append(decoded_uri) + bake_path.append('index.html') + elif decoded_uri == '': + bake_path.append('index.html') + else: + bake_path.append(decoded_uri) + + return os.path.normpath(os.path.join(*bake_path)) + + +class BakingError(Exception): + pass + + +class PageBaker(object): + def __init__(self, app, out_dir, force=False): + self.app = app + 
self.out_dir = out_dir + self.force = force + self.site_root = app.config.get('site/root') + self.pretty_urls = app.config.get('site/pretty_urls') + self._do_write = self._writeDirect + self._writer_queue = None + self._writer = None + self._stats = app.env.stats + self._rsr = app.env.rendered_segments_repository + + def startWriterQueue(self): + self._writer_queue = queue.Queue() + self._writer = threading.Thread( + name='PageSerializer', + daemon=True, + target=_text_writer, + args=(self._writer_queue,)) + self._writer.start() + self._do_write = self._sendToWriterQueue + + def stopWriterQueue(self): + self._writer_queue.put_nowait(None) + self._writer.join() + + def _sendToWriterQueue(self, out_path, content): + self._writer_queue.put_nowait((out_path, content)) + + def _writeDirect(self, out_path, content): + with open(out_path, 'w', encoding='utf8') as fp: + fp.write(content) + + def bake(self, page, prev_entry, force=False): + cur_sub = 1 + has_more_subs = True + app = self.app + out_dir = self.out_dir + force_bake = self.force or force + pretty_urls = page.config.get('pretty_urls', self.pretty_urls) + + rendered_subs = [] + + # Start baking the sub-pages. + while has_more_subs: + sub_uri = page.getUri(sub_num=cur_sub) + logger.debug("Baking '%s' [%d]..." % (sub_uri, cur_sub)) + + out_path = get_output_path(app, out_dir, sub_uri, pretty_urls) + + # Create the sub-entry for the bake record. + cur_sub_entry = create_subpage_job_result(sub_uri, out_path) + rendered_subs.append(cur_sub_entry) + + # Find a corresponding sub-entry in the previous bake record. + prev_sub_entry = None + if prev_entry is not None: + try: + prev_sub_entry = prev_entry.getSub(cur_sub) + except IndexError: + pass + + # Figure out if we need to bake this page. + bake_status = _get_bake_status(page, out_path, force_bake, + prev_sub_entry, cur_sub_entry) + + # If this page didn't bake because it's already up-to-date. + # Keep trying for as many subs as we know this page has. 
+ if bake_status == STATUS_CLEAN: + cur_sub_entry['render_info'] = copy.deepcopy( + prev_sub_entry['render_info']) + cur_sub_entry['flags'] = SubPageFlags.FLAG_NONE + + if prev_entry.num_subs >= cur_sub + 1: + cur_sub += 1 + has_more_subs = True + logger.debug(" %s is up to date, skipping to next " + "sub-page." % out_path) + continue + + logger.debug(" %s is up to date, skipping bake." % out_path) + break + + # All good, proceed. + try: + if bake_status == STATUS_INVALIDATE_AND_BAKE: + cache_key = sub_uri + self._rsr.invalidate(cache_key) + cur_sub_entry['flags'] |= \ + SubPageFlags.FLAG_RENDER_CACHE_INVALIDATED + + logger.debug(" p%d -> %s" % (cur_sub, out_path)) + rp = self._bakeSingle(page, cur_sub, out_path) + except AbortedSourceUseError: + raise + except Exception as ex: + logger.exception(ex) + raise BakingError("%s: error baking '%s'." % + (page.content_spec, sub_uri)) from ex + + # Record what we did. + cur_sub_entry['flags'] |= SubPageFlags.FLAG_BAKED + cur_sub_entry['render_info'] = copy.deepcopy(rp.render_info) + + # Copy page assets. + if (cur_sub == 1 and + cur_sub_entry['render_info']['used_assets']): + if pretty_urls: + out_assets_dir = os.path.dirname(out_path) + else: + out_assets_dir, out_name = os.path.split(out_path) + if sub_uri != self.site_root: + out_name_noext, _ = os.path.splitext(out_name) + out_assets_dir = os.path.join(out_assets_dir, + out_name_noext) + + logger.debug("Copying page assets to: %s" % out_assets_dir) + _ensure_dir_exists(out_assets_dir) + assetor = rp.data.get('assets') + if assetor is not None: + for i in assetor._getAssetItems(): + fn = os.path.basename(i.spec) + out_asset_path = os.path.join(out_assets_dir, fn) + logger.debug(" %s -> %s" % (i.spec, out_asset_path)) + shutil.copy(i.spec, out_asset_path) + + # Figure out if we have more work. 
+ has_more_subs = False + if cur_sub_entry['render_info']['pagination_has_more']: + cur_sub += 1 + has_more_subs = True + + return rendered_subs + + def _bakeSingle(self, page, sub_num, out_path): + ctx = RenderingContext(page, sub_num=sub_num) + page.source.prepareRenderContext(ctx) + + with self._stats.timerScope("PageRender"): + rp = render_page(ctx) + + with self._stats.timerScope("PageSerialize"): + self._do_write(out_path, rp.content) + + return rp + + +def _text_writer(q): + while True: + item = q.get() + if item is not None: + out_path, txt = item + out_dir = os.path.dirname(out_path) + _ensure_dir_exists(out_dir) + + with open(out_path, 'w', encoding='utf8') as fp: + fp.write(txt) + + q.task_done() + else: + # Sentinel object, terminate the thread. + q.task_done() + break + + +STATUS_CLEAN = 0 +STATUS_BAKE = 1 +STATUS_INVALIDATE_AND_BAKE = 2 + + +def _get_bake_status(page, out_path, force, prev_sub_entry, cur_sub_entry): + # Figure out if we need to invalidate or force anything. + status = _compute_force_flags(prev_sub_entry, cur_sub_entry) + if status != STATUS_CLEAN: + return status + + # Easy test. + if force: + cur_sub_entry['flags'] |= \ + SubPageFlags.FLAG_FORCED_BY_GENERAL_FORCE + # We need to invalidate any cache we have on this page because + # it's being forced, so something important has changed somehow. + return STATUS_INVALIDATE_AND_BAKE + + # Check for up-to-date outputs. + in_path_time = page.content_mtime + try: + out_path_time = os.path.getmtime(out_path) + except OSError: + # File doesn't exist, we'll need to bake. + cur_sub_entry['flags'] |= \ + SubPageFlags.FLAG_FORCED_BY_NO_PREVIOUS + return STATUS_BAKE + + if out_path_time <= in_path_time: + return STATUS_BAKE + + # Nope, all good. + return STATUS_CLEAN + + +def _compute_force_flags(prev_sub_entry, cur_sub_entry): + if prev_sub_entry and len(prev_sub_entry['errors']) > 0: + # Previous bake failed. We'll have to bake it again. 
+ cur_sub_entry['flags'] |= \ + SubPageFlags.FLAG_FORCED_BY_PREVIOUS_ERRORS + return STATUS_BAKE + + if not prev_sub_entry: + # No previous record, so most probably was never baked. Bake it. + cur_sub_entry['flags'] |= \ + SubPageFlags.FLAG_FORCED_BY_NO_PREVIOUS + return STATUS_BAKE + + return STATUS_CLEAN + + +def _ensure_dir_exists(path): + try: + os.makedirs(path, mode=0o755, exist_ok=True) + except OSError: + # In a multiprocess environment, several process may very + # occasionally try to create the same directory at the same time. + # Let's ignore any error and if something's really wrong (like file + # acces permissions or whatever), then it will more legitimately fail + # just after this when we try to write files. + pass + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/_pagerecords.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/_pagerecords.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,161 @@ +import copy +from piecrust.pipelines.records import RecordEntry, get_flag_descriptions + + +class SubPageFlags: + FLAG_NONE = 0 + FLAG_BAKED = 2**0 + FLAG_FORCED_BY_SOURCE = 2**1 + FLAG_FORCED_BY_NO_PREVIOUS = 2**2 + FLAG_FORCED_BY_PREVIOUS_ERRORS = 2**3 + FLAG_FORCED_BY_GENERAL_FORCE = 2**4 + FLAG_RENDER_CACHE_INVALIDATED = 2**5 + + +def create_subpage_job_result(out_uri, out_path): + return { + 'out_uri': out_uri, + 'out_path': out_path, + 'flags': SubPageFlags.FLAG_NONE, + 'errors': [], + 'render_info': None + } + + +def was_subpage_clean(sub): + return ((sub['flags'] & SubPageFlags.FLAG_BAKED) == 0 and + len(sub['errors']) == 0) + + +def was_subpage_baked(sub): + return (sub['flags'] & SubPageFlags.FLAG_BAKED) != 0 + + +def was_subpage_baked_successfully(sub): + return was_subpage_baked(sub) and len(sub['errors']) == 0 + + +class PagePipelineRecordEntry(RecordEntry): + FLAG_NONE = 0 + FLAG_NEW = 2**0 + FLAG_SOURCE_MODIFIED = 2**1 + FLAG_OVERRIDEN = 2**2 + FLAG_COLLAPSED_FROM_LAST_RUN = 2**3 + FLAG_IS_DRAFT = 2**4 + 
FLAG_ABORTED_FOR_SOURCE_USE = 2**5 + + def __init__(self): + super().__init__() + self.flags = self.FLAG_NONE + self.config = None + self.route_params = None + self.timestamp = None + self.subs = [] + + @property + def was_touched(self): + return (self.flags & self.FLAG_SOURCE_MODIFIED) != 0 + + @property + def was_overriden(self): + return (self.flags & self.FLAG_OVERRIDEN) != 0 + + @property + def num_subs(self): + return len(self.subs) + + @property + def was_any_sub_baked(self): + for o in self.subs: + if was_subpage_baked(o): + return True + return False + + @property + def has_any_error(self): + if len(self.errors) > 0: + return True + for o in self.subs: + if len(o['errors']) > 0: + return True + return False + + def getSub(self, page_num): + return self.subs[page_num - 1] + + def getAllErrors(self): + yield from self.errors + for o in self.subs: + yield from o['errors'] + + def getAllUsedSourceNames(self): + res_segments = set() + res_layout = set() + for o in self.subs: + pinfo = o.get('render_info') + if pinfo: + usn = pinfo['used_source_names'] + res_segments |= set(usn['segments']) + res_layout |= set(usn['layout']) + return res_segments, res_layout + + def getAllOutputPaths(self): + for o in self.subs: + yield o['out_path'] + + def describe(self): + d = super().describe() + d['Flags'] = get_flag_descriptions(self.flags, flag_descriptions) + for i, sub in enumerate(self.subs): + d['Sub%02d' % i] = { + 'URI': sub['out_uri'], + 'Path': sub['out_path'], + 'Flags': get_flag_descriptions( + sub['flags'], sub_flag_descriptions), + 'RenderInfo': _describe_render_info(sub['render_info']) + } + return d + + +def add_page_job_result(result): + result.update({ + 'flags': PagePipelineRecordEntry.FLAG_NONE, + 'errors': [], + 'subs': [] + }) + + +def merge_job_result_into_record_entry(record_entry, result): + record_entry.flags |= result['flags'] + record_entry.errors += result['errors'] + record_entry.subs += result['subs'] + + +flag_descriptions = { + 
PagePipelineRecordEntry.FLAG_NEW: 'new', + PagePipelineRecordEntry.FLAG_SOURCE_MODIFIED: 'touched', + PagePipelineRecordEntry.FLAG_OVERRIDEN: 'overriden', + PagePipelineRecordEntry.FLAG_COLLAPSED_FROM_LAST_RUN: 'from last run', + PagePipelineRecordEntry.FLAG_IS_DRAFT: 'draft', + PagePipelineRecordEntry.FLAG_ABORTED_FOR_SOURCE_USE: 'aborted for source use'} + + +sub_flag_descriptions = { + SubPageFlags.FLAG_BAKED: 'baked', + SubPageFlags.FLAG_FORCED_BY_SOURCE: 'forced by source', + SubPageFlags.FLAG_FORCED_BY_NO_PREVIOUS: 'forced b/c new', + SubPageFlags.FLAG_FORCED_BY_PREVIOUS_ERRORS: 'forced by errors', + SubPageFlags.FLAG_FORCED_BY_GENERAL_FORCE: 'manually forced', + SubPageFlags.FLAG_RENDER_CACHE_INVALIDATED: 'cache invalidated' +} + + +def _describe_render_info(ri): + if ri is None: + return '' + return { + 'UsedPagination': ri['used_pagination'], + 'PaginationHasMore': ri['pagination_has_more'], + 'UsedAssets': ri['used_assets'], + 'UsedSourceNames': ri['used_source_names'] + } diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/_procrecords.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/_procrecords.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,76 @@ +from piecrust.pipelines.records import ( + RecordEntry, get_flag_descriptions) + + +class AssetPipelineRecordEntry(RecordEntry): + FLAG_NONE = 0 + FLAG_PREPARED = 2**0 + FLAG_PROCESSED = 2**1 + FLAG_BYPASSED_STRUCTURED_PROCESSING = 2**3 + FLAG_COLLAPSED_FROM_LAST_RUN = 2**4 + + def __init__(self): + super().__init__() + self.flags = self.FLAG_NONE + self.proc_tree = None + self.out_paths = [] + + @property + def was_prepared(self): + return bool(self.flags & self.FLAG_PREPARED) + + @property + def was_processed(self): + return (self.was_prepared and + (bool(self.flags & self.FLAG_PROCESSED) or + len(self.errors) > 0)) + + @property + def was_processed_successfully(self): + return self.was_processed and not self.errors + + @property + def was_collapsed_from_last_run(self): + return 
self.flags & self.FLAG_COLLAPSED_FROM_LAST_RUN + + def describe(self): + d = super().describe() + d['Flags'] = get_flag_descriptions(self.flags, flag_descriptions) + d['Processing Tree'] = _format_proc_tree(self.proc_tree, 20 * ' ') + return d + + def getAllOutputPaths(self): + return self.out_paths + + +def add_asset_job_result(result): + result.update({ + 'item_spec': None, + 'flags': AssetPipelineRecordEntry.FLAG_NONE, + 'proc_tree': None, + 'out_paths': [], + }) + + +def merge_job_result_into_record_entry(record_entry, result): + record_entry.item_spec = result['item_spec'] + record_entry.flags |= result['flags'] + record_entry.proc_tree = result['proc_tree'] + record_entry.out_paths = result['out_paths'] + + +flag_descriptions = { + AssetPipelineRecordEntry.FLAG_PREPARED: 'prepared', + AssetPipelineRecordEntry.FLAG_PROCESSED: 'processed', + AssetPipelineRecordEntry.FLAG_BYPASSED_STRUCTURED_PROCESSING: 'external', + AssetPipelineRecordEntry.FLAG_COLLAPSED_FROM_LAST_RUN: 'from last run'} + + +def _format_proc_tree(tree, margin='', level=0): + name, children = tree + res = '%s%s+ %s\n' % (margin if level > 0 else '', level * ' ', name) + if children: + for c in children: + res += _format_proc_tree(c, margin, level + 1) + return res + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/_proctree.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/_proctree.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,297 @@ +import os +import time +import os.path +import logging +from piecrust.chefutil import format_timed +from piecrust.processing.base import FORCE_BUILD + + +logger = logging.getLogger(__name__) + + +STATE_UNKNOWN = 0 +STATE_DIRTY = 1 +STATE_CLEAN = 2 + + +class ProcessingTreeError(Exception): + pass + + +class ProcessorNotFoundError(ProcessingTreeError): + pass + + +class ProcessorError(ProcessingTreeError): + def __init__(self, proc_name, in_path, *args): + super(ProcessorError, self).__init__(*args) + self.proc_name = proc_name + 
self.in_path = in_path + + def __str__(self): + return "Processor %s failed on: %s" % (self.proc_name, self.in_path) + + +class ProcessingTreeNode(object): + def __init__(self, path, available_procs, level=0): + self.path = path + self.available_procs = available_procs + self.outputs = [] + self.level = level + self.state = STATE_UNKNOWN + self._processor = None + + def getProcessor(self): + if self._processor is None: + for p in self.available_procs: + if p.matches(self.path): + self._processor = p + self.available_procs.remove(p) + break + else: + raise ProcessorNotFoundError() + return self._processor + + def setState(self, state, recursive=True): + self.state = state + if recursive: + for o in self.outputs: + o.setState(state, True) + + @property + def is_leaf(self): + return len(self.outputs) == 0 + + def getLeaves(self): + if self.is_leaf: + return [self] + leaves = [] + for o in self.outputs: + for l in o.getLeaves(): + leaves.append(l) + return leaves + + +class ProcessingTreeBuilder(object): + def __init__(self, processors): + self.processors = processors + + def build(self, path): + tree_root = ProcessingTreeNode(path, list(self.processors)) + + loop_guard = 100 + walk_stack = [tree_root] + while len(walk_stack) > 0: + loop_guard -= 1 + if loop_guard <= 0: + raise ProcessingTreeError("Infinite loop detected!") + + cur_node = walk_stack.pop() + proc = cur_node.getProcessor() + + # If the root tree node (and only that one) wants to bypass this + # whole tree business, so be it. + if proc.is_bypassing_structured_processing: + if cur_node != tree_root: + raise ProcessingTreeError("Only root processors can " + "bypass structured processing.") + break + + # Get the destination directory and output files. 
+ rel_dir, basename = os.path.split(cur_node.path) + out_names = proc.getOutputFilenames(basename) + if out_names is None: + continue + + for n in out_names: + out_node = ProcessingTreeNode( + os.path.join(rel_dir, n), + list(cur_node.available_procs), + cur_node.level + 1) + cur_node.outputs.append(out_node) + + if proc.PROCESSOR_NAME != 'copy': + walk_stack.append(out_node) + + return tree_root + + +class ProcessingTreeRunner(object): + def __init__(self, base_dir, tmp_dir, out_dir): + self.base_dir = base_dir + self.tmp_dir = tmp_dir + self.out_dir = out_dir + + def processSubTree(self, tree_root): + did_process = False + walk_stack = [tree_root] + while len(walk_stack) > 0: + cur_node = walk_stack.pop() + + self._computeNodeState(cur_node) + if cur_node.state == STATE_DIRTY: + did_process_this_node = self.processNode(cur_node) + did_process |= did_process_this_node + + if did_process_this_node: + for o in cur_node.outputs: + if not o.is_leaf: + walk_stack.append(o) + else: + for o in cur_node.outputs: + if not o.is_leaf: + walk_stack.append(o) + return did_process + + def processNode(self, node): + full_path = self._getNodePath(node) + proc = node.getProcessor() + if proc.is_bypassing_structured_processing: + try: + start_time = time.perf_counter() + with proc.app.env.stats.timerScope(proc.__class__.__name__): + proc.process(full_path, self.out_dir) + print_node( + node, + format_timed( + start_time, "(bypassing structured processing)", + colored=False)) + return True + except Exception as e: + raise ProcessorError(proc.PROCESSOR_NAME, full_path) from e + + # All outputs of a node must go to the same directory, so we can get + # the output directory off of the first output. 
+ base_out_dir = self._getNodeBaseDir(node.outputs[0]) + rel_out_dir = os.path.dirname(node.path) + out_dir = os.path.join(base_out_dir, rel_out_dir) + if not os.path.isdir(out_dir): + try: + os.makedirs(out_dir, 0o755, exist_ok=True) + except OSError: + pass + + try: + start_time = time.perf_counter() + with proc.app.env.stats.timerScope(proc.__class__.__name__): + proc_res = proc.process(full_path, out_dir) + if proc_res is None: + raise Exception("Processor '%s' didn't return a boolean " + "result value." % proc) + if proc_res: + print_node(node, "-> %s" % out_dir) + return True + else: + print_node(node, "-> %s [clean]" % out_dir) + return False + except Exception as e: + raise ProcessorError(proc.PROCESSOR_NAME, full_path) from e + + def _computeNodeState(self, node): + if node.state != STATE_UNKNOWN: + return + + proc = node.getProcessor() + if (proc.is_bypassing_structured_processing or + not proc.is_delegating_dependency_check): + # This processor wants to handle things on its own... + node.setState(STATE_DIRTY, False) + return + + start_time = time.perf_counter() + + # Get paths and modification times for the input path and + # all dependencies (if any). + base_dir = self._getNodeBaseDir(node) + full_path = os.path.join(base_dir, node.path) + in_mtime = (full_path, os.path.getmtime(full_path)) + force_build = False + try: + deps = proc.getDependencies(full_path) + if deps == FORCE_BUILD: + force_build = True + elif deps is not None: + for dep in deps: + dep_mtime = os.path.getmtime(dep) + if dep_mtime > in_mtime[1]: + in_mtime = (dep, dep_mtime) + except Exception as e: + logger.warning("%s -- Will force-bake: %s" % (e, node.path)) + node.setState(STATE_DIRTY, True) + return + + if force_build: + # Just do what the processor told us to do. + node.setState(STATE_DIRTY, True) + message = "Processor requested a forced build." + print_node(node, message) + else: + # Get paths and modification times for the outputs. 
+ message = None + for o in node.outputs: + full_out_path = self._getNodePath(o) + if not os.path.isfile(full_out_path): + message = "Output '%s' doesn't exist." % o.path + break + o_mtime = os.path.getmtime(full_out_path) + if o_mtime < in_mtime[1]: + message = "Input '%s' is newer than output '%s'." % ( + in_mtime[0], o.path) + break + if message is not None: + node.setState(STATE_DIRTY, True) + message += " Re-processing sub-tree." + print_node(node, message) + else: + node.setState(STATE_CLEAN, False) + + if node.state == STATE_DIRTY: + state = "dirty" + elif node.state == STATE_CLEAN: + state = "clean" + else: + state = "unknown" + logger.debug(format_timed(start_time, + "Computed node dirtyness: %s" % state, + indent_level=node.level, colored=False)) + + def _getNodeBaseDir(self, node): + if node.level == 0: + return self.base_dir + if node.is_leaf: + return self.out_dir + return os.path.join(self.tmp_dir, str(node.level)) + + def _getNodePath(self, node): + base_dir = self._getNodeBaseDir(node) + return os.path.join(base_dir, node.path) + + +def print_node(node, message=None, recursive=False): + indent = ' ' * node.level + try: + proc_name = node.getProcessor().PROCESSOR_NAME + except ProcessorNotFoundError: + proc_name = 'n/a' + + message = message or '' + logger.debug('%s%s [%s] %s' % (indent, node.path, proc_name, message)) + + if recursive: + for o in node.outputs: + print_node(o, None, True) + + +def get_node_name_tree(node): + try: + proc_name = node.getProcessor().PROCESSOR_NAME + except ProcessorNotFoundError: + proc_name = 'n/a' + + children = [] + for o in node.outputs: + if not o.outputs: + continue + children.append(get_node_name_tree(o)) + return (proc_name, children) + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/asset.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/asset.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,200 @@ +import os +import os.path +import re +import logging +from 
piecrust.pipelines._procrecords import ( + AssetPipelineRecordEntry, + add_asset_job_result, merge_job_result_into_record_entry) +from piecrust.pipelines._proctree import ( + ProcessingTreeBuilder, ProcessingTreeRunner, + get_node_name_tree, print_node, + STATE_DIRTY) +from piecrust.pipelines.base import ContentPipeline +from piecrust.processing.base import ProcessorContext +from piecrust.sources.fs import FSContentSourceBase + + +logger = logging.getLogger(__name__) + + +class AssetPipeline(ContentPipeline): + PIPELINE_NAME = 'asset' + RECORD_ENTRY_CLASS = AssetPipelineRecordEntry + + def __init__(self, source, ppctx): + if not isinstance(source, FSContentSourceBase): + raise Exception( + "The asset pipeline only support file-system sources.") + + super().__init__(source, ppctx) + self._ignore_patterns = [] + self._processors = None + self._base_dir = source.fs_endpoint_path + + def initialize(self): + # Get the list of processors for this run. + processors = self.app.plugin_loader.getProcessors() + enabled_processors = self.app.config.get('pipelines/asset/processors') + if enabled_processors is not None: + logger.debug("Filtering processors to: %s" % enabled_processors) + processors = get_filtered_processors(processors, + enabled_processors) + + # Invoke pre-processors. + proc_ctx = ProcessorContext(self) + for proc in processors: + proc.onPipelineStart(proc_ctx) + + # Add any extra processors registered in the `onPipelineStart` step. + processors += proc_ctx.extra_processors + + # Sort our processors by priority. + processors.sort(key=lambda p: p.priority) + + # Ok, that's the list of processors for this run. + self._processors = processors + + # Pre-processors can define additional ignore patterns so let's + # add them to what we had already. + ignores = self.app.config.get('pipelines/asset/ignore', []) + ignores += proc_ctx.ignore_patterns + self._ignore_patterns += make_re(ignores) + + # Register timers. 
+ stats = self.app.env.stats + stats.registerTimer('BuildProcessingTree', raise_if_registered=False) + stats.registerTimer('RunProcessingTree', raise_if_registered=False) + + def run(self, job, ctx, result): + # Create the result stuff. + item_spec = job['job_spec'][1] + add_asset_job_result(result) + result['item_spec'] = item_spec + + # See if we need to ignore this item. + rel_path = os.path.relpath(item_spec, self._base_dir) + if re_matchany(rel_path, self._ignore_patterns): + return + + # Build the processing tree for this job. + stats = self.app.env.stats + with stats.timerScope('BuildProcessingTree'): + builder = ProcessingTreeBuilder(self._processors) + tree_root = builder.build(rel_path) + result['flags'] |= AssetPipelineRecordEntry.FLAG_PREPARED + + # Prepare and run the tree. + out_dir = self.ctx.out_dir + print_node(tree_root, recursive=True) + leaves = tree_root.getLeaves() + result['out_paths'] = [os.path.join(out_dir, l.path) + for l in leaves] + result['proc_tree'] = get_node_name_tree(tree_root) + if tree_root.getProcessor().is_bypassing_structured_processing: + result['flags'] |= ( + AssetPipelineRecordEntry.FLAG_BYPASSED_STRUCTURED_PROCESSING) + + if self.ctx.force: + tree_root.setState(STATE_DIRTY, True) + + with stats.timerScope('RunProcessingTree'): + runner = ProcessingTreeRunner( + self._base_dir, self.tmp_dir, out_dir) + if runner.processSubTree(tree_root): + result['flags'] |= ( + AssetPipelineRecordEntry.FLAG_PROCESSED) + + def handleJobResult(self, result, ctx): + entry = self.createRecordEntry(result['item_spec']) + merge_job_result_into_record_entry(entry, result) + ctx.record.addEntry(entry) + + def getDeletions(self, ctx): + for prev, cur in ctx.record_history.diffs: + if prev and not cur: + for p in prev.out_paths: + yield (p, 'previous asset was removed') + elif prev and cur and cur.was_processed_successfully: + diff = set(prev.out_paths) - set(cur.out_paths) + for p in diff: + yield (p, 'asset changed outputs') + + def 
collapseRecords(self, ctx): + for prev, cur in ctx.record_history.diffs: + if prev and cur and not cur.was_processed: + # This asset wasn't processed, so the information from + # last time is still valid. + cur.flags = ( + (prev.flags & ~AssetPipelineRecordEntry.FLAG_PROCESSED) | + AssetPipelineRecordEntry.FLAG_COLLAPSED_FROM_LAST_RUN) + cur.out_paths = list(prev.out_paths) + cur.errors = list(prev.errors) + + def shutdown(self): + # Invoke post-processors. + proc_ctx = ProcessorContext(self) + for proc in self._processors: + proc.onPipelineEnd(proc_ctx) + + +split_processor_names_re = re.compile(r'[ ,]+') + + +def get_filtered_processors(processors, authorized_names): + if not authorized_names or authorized_names == 'all': + return processors + + if isinstance(authorized_names, str): + authorized_names = split_processor_names_re.split(authorized_names) + + procs = [] + has_star = 'all' in authorized_names + for p in processors: + for name in authorized_names: + if name == p.PROCESSOR_NAME: + procs.append(p) + break + if name == ('-%s' % p.PROCESSOR_NAME): + break + else: + if has_star: + procs.append(p) + return procs + + +def make_re(patterns): + re_patterns = [] + for pat in patterns: + if pat[0] == '/' and pat[-1] == '/' and len(pat) > 2: + re_patterns.append(pat[1:-1]) + else: + escaped_pat = ( + re.escape(pat) + .replace(r'\*', r'[^/\\]*') + .replace(r'\?', r'[^/\\]')) + re_patterns.append(escaped_pat) + return [re.compile(p) for p in re_patterns] + + +def re_matchany(rel_path, patterns): + # skip patterns use a forward slash regardless of the platform. 
+ rel_path = rel_path.replace('\\', '/') + for pattern in patterns: + if pattern.search(rel_path): + return True + return False + + +re_ansicolors = re.compile('\033\\[\d+m') + + +def _get_errors(ex, strip_colors=False): + errors = [] + while ex is not None: + msg = str(ex) + if strip_colors: + msg = re_ansicolors.sub('', msg) + errors.append(msg) + ex = ex.__cause__ + return errors + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/base.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,319 @@ +import os.path +import logging +from werkzeug.utils import cached_property +from piecrust.configuration import ConfigurationError +from piecrust.sources.base import ContentItem + + +logger = logging.getLogger(__name__) + + +class PipelineContext: + """ The context for running a content pipeline. + """ + def __init__(self, out_dir, *, + worker_id=-1, force=None): + self.out_dir = out_dir + self.worker_id = worker_id + self.force = force + + @property + def is_worker(self): + """ Returns `True` if the content pipeline is running inside + a worker process, and this is the first one. + """ + return self.worker_id >= 0 + + @property + def is_main_process(self): + """ Returns `True` is the content pipeline is running inside + the main process (and not a worker process). This is the case + if there are no worker processes at all. + """ + return self.worker_id < 0 + + +class _PipelineMasterProcessJobContextBase: + def __init__(self, record_name, record_histories): + self.record_name = record_name + self.record_histories = record_histories + + @property + def previous_record(self): + return self.record_histories.getPreviousRecord(self.record_name) + + @property + def current_record(self): + return self.record_histories.getCurrentRecord(self.record_name) + + +class PipelineJobCreateContext(_PipelineMasterProcessJobContextBase): + """ Context for creating pipeline baking jobs. 
+ + This is run on the master process, so it can access both the + previous and current records. + """ + def __init__(self, pass_num, record_name, record_histories): + super().__init__(record_name, record_histories) + self.pass_num = pass_num + + +class PipelineJobValidateContext(_PipelineMasterProcessJobContextBase): + """ Context for validating jobs on subsequent step runs (i.e. validating + the list of jobs to run starting with the second step). + + This is run on the master process, so it can access both the + previous and current records. + """ + def __init__(self, pass_num, step_num, record_name, record_histories): + super().__init__(record_name, record_histories) + self.pass_num = pass_num + self.step_num = step_num + + +class PipelineJobRunContext: + """ Context for running pipeline baking jobs. + + This is run on the worker processes, so it can only access the + previous records. + """ + def __init__(self, job, record_name, previous_records): + self.job = job + self.record_name = record_name + self.previous_records = previous_records + + @cached_property + def record_entry_spec(self): + return self.job.get('record_entry_spec', + self.job['job_spec'][1]) + + @cached_property + def previous_record(self): + return self.previous_records.getRecord(self.record_name) + + @cached_property + def previous_entry(self): + return self.previous_record.getEntry(self.record_entry_spec) + + +class PipelineJobResultHandleContext: + """ The context for handling the result from a job that ran in a + worker process. + + This is run on the master process, so it can access the current + record. 
+ """ + def __init__(self, record, job, step_num): + self.record = record + self.job = job + self.step_num = step_num + + @cached_property + def record_entry(self): + record_entry_spec = self.job.get('record_entry_spec', + self.job['job_spec'][1]) + return self.record.getEntry(record_entry_spec) + + +class PipelinePostJobRunContext: + def __init__(self, record_history): + self.record_history = record_history + + +class PipelineDeletionContext: + def __init__(self, record_history): + self.record_history = record_history + + +class PipelineCollapseRecordContext: + def __init__(self, record_history): + self.record_history = record_history + + +class ContentPipeline: + """ A pipeline that processes content from a `ContentSource`. + """ + PIPELINE_NAME = None + RECORD_ENTRY_CLASS = None + PASS_NUM = 0 + + def __init__(self, source, ctx): + self.source = source + self.ctx = ctx + self.record_name = '%s@%s' % (source.name, self.PIPELINE_NAME) + + app = source.app + tmp_dir = app.cache_dir + if not tmp_dir: + import tempfile + tmp_dir = os.path.join(tempfile.gettempdir(), 'piecrust') + self.tmp_dir = os.path.join(tmp_dir, self.PIPELINE_NAME) + + @property + def app(self): + return self.source.app + + def initialize(self): + pass + + def loadAllContents(self): + return None + + def createJobs(self, ctx): + return [ + create_job(self, item.spec) + for item in self.source.getAllContents()] + + def createRecordEntry(self, item_spec): + entry_class = self.RECORD_ENTRY_CLASS + record_entry = entry_class() + record_entry.item_spec = item_spec + return record_entry + + def handleJobResult(self, result, ctx): + raise NotImplementedError() + + def validateNextStepJobs(self, jobs, ctx): + pass + + def run(self, job, ctx, result): + raise NotImplementedError() + + def postJobRun(self, ctx): + pass + + def getDeletions(self, ctx): + pass + + def collapseRecords(self, ctx): + pass + + def shutdown(self): + pass + + +def create_job(pipeline, item_spec, **kwargs): + job = { + 'job_spec': 
(pipeline.source.name, item_spec) + } + job.update(kwargs) + return job + + +def content_item_from_job(pipeline, job): + return pipeline.source.findContentFromSpec(job['job_spec'][1]) + + +def get_record_name_for_source(source): + ppname = get_pipeline_name_for_source(source) + return '%s@%s' % (source.name, ppname) + + +def get_pipeline_name_for_source(source): + pname = source.config['pipeline'] + if not pname: + pname = source.DEFAULT_PIPELINE_NAME + if not pname: + raise ConfigurationError( + "Source '%s' doesn't specify any pipeline." % source.name) + return pname + + +class PipelineManager: + def __init__(self, app, out_dir, *, + record_histories=None, worker_id=-1, force=False): + self.app = app + self.record_histories = record_histories + self.out_dir = out_dir + self.worker_id = worker_id + self.force = force + + self._pipeline_classes = {} + for pclass in app.plugin_loader.getPipelines(): + self._pipeline_classes[pclass.PIPELINE_NAME] = pclass + + self._pipelines = {} + + def getPipeline(self, source_name): + return self.getPipelineInfo(source_name).pipeline + + def getPipelineInfo(self, source_name): + return self._pipelines[source_name] + + def getPipelineInfos(self): + return self._pipelines.values() + + def createPipeline(self, source): + if source.name in self._pipelines: + raise ValueError("Pipeline for source '%s' was already created." 
% + source.name) + + pname = get_pipeline_name_for_source(source) + ppctx = PipelineContext(self.out_dir, + worker_id=self.worker_id, force=self.force) + pp = self._pipeline_classes[pname](source, ppctx) + pp.initialize() + + record_history = None + if self.record_histories: + record_history = self.record_histories.getHistory(pp.record_name) + + info = _PipelineInfo(pp, record_history) + self._pipelines[source.name] = info + return info + + def postJobRun(self): + for ppinfo in self.getPipelineInfos(): + ppinfo.record_history.build() + + for ppinfo in self.getPipelineInfos(): + ctx = PipelinePostJobRunContext(ppinfo.record_history) + ppinfo.pipeline.postJobRun(ctx) + + def deleteStaleOutputs(self): + for ppinfo in self.getPipelineInfos(): + ctx = PipelineDeletionContext(ppinfo.record_history) + to_delete = ppinfo.pipeline.getDeletions(ctx) + current_record = ppinfo.record_history.current + if to_delete is not None: + for path, reason in to_delete: + logger.debug("Removing '%s': %s" % (path, reason)) + current_record.deleted_out_paths.append(path) + try: + os.remove(path) + except FileNotFoundError: + pass + logger.info('[delete] %s' % path) + + def collapseRecords(self): + for ppinfo in self.getPipelineInfos(): + ctx = PipelineCollapseRecordContext(ppinfo.record_history) + ppinfo.pipeline.collapseRecords(ctx) + + def shutdownPipelines(self): + for ppinfo in self.getPipelineInfos(): + ppinfo.pipeline.shutdown() + + self._pipelines = {} + + +class _PipelineInfo: + def __init__(self, pipeline, record_history): + self.pipeline = pipeline + self.record_history = record_history + self.userdata = None + + @property + def source(self): + return self.pipeline.source + + @property + def current_record(self): + if self.record_history is not None: + return self.record_history.current + raise Exception("The current record is not available.") + + @property + def pipeline_name(self): + return self.pipeline.PIPELINE_NAME + diff -r 7a1903ede496 -r 2e5c5d33d62c 
piecrust/pipelines/page.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/page.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,258 @@ +import copy +import time +import logging +from piecrust.pipelines.base import ( + ContentPipeline, create_job, content_item_from_job) +from piecrust.pipelines._pagebaker import PageBaker, get_output_path +from piecrust.pipelines._pagerecords import ( + PagePipelineRecordEntry, + add_page_job_result, merge_job_result_into_record_entry) +from piecrust.sources.base import AbortedSourceUseError + + +logger = logging.getLogger(__name__) + + +class PagePipeline(ContentPipeline): + PIPELINE_NAME = 'page' + RECORD_ENTRY_CLASS = PagePipelineRecordEntry + PASS_NUM = [0, 1] + + def __init__(self, source, ppctx): + super().__init__(source, ppctx) + self._pagebaker = None + self._stats = source.app.env.stats + self._draft_setting = self.app.config['baker/no_bake_setting'] + + def initialize(self): + stats = self._stats + stats.registerCounter('SourceUseAbortions', raise_if_registered=False) + stats.registerManifest('SourceUseAbortions', raise_if_registered=False) + + self._pagebaker = PageBaker(self.app, + self.ctx.out_dir, + force=self.ctx.force) + self._pagebaker.startWriterQueue() + + def loadAllContents(self): + # Here we load all the pages in the source, making sure they all + # have a valid cache for their configuration and contents. + # We also create the record entries while we're at it. 
+ source = self.source + page_fac = self.app.getPage + record_fac = self.createRecordEntry + for item in source.getAllContents(): + page = page_fac(source, item) + + cur_entry = record_fac(item.spec) + cur_entry.config = page.config.getAll() + cur_entry.route_params = item.metadata['route_params'] + cur_entry.timestamp = page.datetime.timestamp() + + if page.was_modified: + cur_entry.flags |= PagePipelineRecordEntry.FLAG_SOURCE_MODIFIED + if page.config.get(self._draft_setting): + cur_entry.flags |= PagePipelineRecordEntry.FLAG_IS_DRAFT + + yield cur_entry + + def createJobs(self, ctx): + if ctx.pass_num == 0: + return self._createFirstPassJobs(ctx) + return self._createSecondPassJobs(ctx) + + def _createFirstPassJobs(self, ctx): + jobs = [] + + app = self.app + out_dir = self.ctx.out_dir + uri_getter = self.source.route.getUri + pretty_urls = app.config.get('site/pretty_urls') + + used_paths = _get_used_paths_from_records( + ctx.record_histories.current.records) + history = ctx.record_histories.getHistory(ctx.record_name).copy() + history.build() + + record = ctx.current_record + record.user_data['dirty_source_names'] = set() + + for prev, cur in history.diffs: + # Ignore pages that disappeared since last bake. + if cur is None: + continue + + # Skip draft pages. + if cur.flags & PagePipelineRecordEntry.FLAG_IS_DRAFT: + continue + + # Skip pages that are known to use other sources... we'll + # schedule them in the second pass. + if prev: + usn1, usn2 = prev.getAllUsedSourceNames() + if usn1 or usn2: + continue + + # Check if this item has been overriden by a previous pipeline + # run... for instance, we could be the pipeline for a "theme pages" + # source, and some of our pages have been overriden by a user + # page that writes out to the same URL. 
+ uri = uri_getter(cur.route_params) + path = get_output_path(app, out_dir, uri, pretty_urls) + override = used_paths.get(path) + if override is not None: + override_source_name, override_entry = override + override_source = app.getSource(override_source_name) + if override_source.config['realm'] == \ + self.source.config['realm']: + logger.error( + "Page '%s' would get baked to '%s' " + "but is overriden by '%s'." % + (cur.item_spec, path, override_entry.item_spec)) + else: + logger.debug( + "Page '%s' would get baked to '%s' " + "but is overriden by '%s'." % + (cur.item_spec, path, override_entry.item_spec)) + + cur.flags |= PagePipelineRecordEntry.FLAG_OVERRIDEN + continue + + # Nope, all good, let's create a job for this item. + jobs.append(create_job(self, cur.item_spec)) + + if len(jobs) > 0: + return jobs + return None + + def _createSecondPassJobs(self, ctx): + # Get the list of all sources that had anything baked. + dirty_source_names = set() + all_records = ctx.record_histories.current.records + for rec in all_records: + rec_dsn = rec.user_data.get('dirty_source_names') + if rec_dsn: + dirty_source_names |= rec_dsn + + # Now look at the stuff we bake for our own source on the first pass. + # For anything that wasn't baked (i.e. it was considered 'up to date') + # we look at the records from last time, and if they say that some + # page was using a source that is "dirty", then we force bake it. + # + # The common example for this is a blog index page which hasn't been + # touched, but needs to be re-baked because someone added or edited + # a post. 
+ jobs = [] + pass_num = ctx.pass_num + history = ctx.record_histories.getHistory(ctx.record_name).copy() + history.build() + for prev, cur in history.diffs: + if not cur: + continue + if cur.was_any_sub_baked: + continue + if prev: + if any(map( + lambda usn: usn in dirty_source_names, + prev.getAllUsedSourceNames()[0])): + jobs.append(create_job(self, prev.item_spec, + pass_num=pass_num, + force_bake=True)) + else: + # This page uses other sources, but no source was dirty + # this time around (it was a null build, maybe). We + # don't have any work to do, but we need to carry over + # any information we have, otherwise the post bake step + # will think we need to delete last bake's outputs. + cur.subs = copy.deepcopy(prev.subs) + + if len(jobs) > 0: + return jobs + return None + + def handleJobResult(self, result, ctx): + existing = ctx.record_entry + merge_job_result_into_record_entry(existing, result) + if existing.was_any_sub_baked: + ctx.record.user_data['dirty_source_names'].add(self.source.name) + + def run(self, job, ctx, result): + pass_num = job.get('pass_num', 0) + step_num = job.get('step_num', 0) + if pass_num == 0: + if step_num == 0: + self._renderOrPostpone(job, ctx, result) + elif step_num == 1: + self._renderAlways(job, ctx, result) + elif pass_num == 1: + self._renderAlways(job, ctx, result) + + def getDeletions(self, ctx): + for prev, cur in ctx.record_history.diffs: + if prev and not cur: + for sub in prev.subs: + yield (sub['out_path'], 'previous source file was removed') + elif prev and cur: + prev_out_paths = [o['out_path'] for o in prev.subs] + cur_out_paths = [o['out_path'] for o in cur.subs] + diff = set(prev_out_paths) - set(cur_out_paths) + for p in diff: + yield (p, 'source file changed outputs') + + def collapseRecords(self, ctx): + pass + + def shutdown(self): + self._pagebaker.stopWriterQueue() + + def _renderOrPostpone(self, job, ctx, result): + # Here our job is to render the page's segments so that they're + # cached in memory 
and on disk... unless we detect that the page + # is using some other sources, in which case we abort and we'll try + # again on the second pass. + content_item = content_item_from_job(self, job) + logger.debug("Conditional render for: %s" % content_item.spec) + page = self.app.getPage(self.source, content_item) + if page.config.get(self._draft_setting): + return + + prev_entry = ctx.previous_entry + + env = self.app.env + env.abort_source_use = True + add_page_job_result(result) + try: + rdr_subs = self._pagebaker.bake(page, prev_entry) + result['subs'] = rdr_subs + except AbortedSourceUseError: + logger.debug("Page was aborted for using source: %s" % + content_item.spec) + result['flags'] |= \ + PagePipelineRecordEntry.FLAG_ABORTED_FOR_SOURCE_USE + env.stats.stepCounter("SourceUseAbortions") + env.stats.addManifestEntry("SourceUseAbortions", content_item.spec) + result['next_step_job'] = create_job(self, content_item.spec) + finally: + env.abort_source_use = False + + def _renderAlways(self, job, ctx, result): + content_item = content_item_from_job(self, job) + logger.debug("Full render for: %s" % content_item.spec) + page = self.app.getPage(self.source, content_item) + prev_entry = ctx.previous_entry + rdr_subs = self._pagebaker.bake(page, prev_entry, + force=job.get('force_bake')) + + add_page_job_result(result) + result['subs'] = rdr_subs + +def _get_used_paths_from_records(records): + used_paths = {} + for rec in records: + src_name = rec.name.split('@')[0] + for e in rec.getEntries(): + paths = e.getAllOutputPaths() + if paths is not None: + for p in paths: + used_paths[p] = (src_name, e) + return used_paths diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/pipelines/records.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/pipelines/records.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,273 @@ +import os +import os.path +import pickle +import hashlib +import logging +from piecrust import APP_VERSION + + +logger = logging.getLogger(__name__) + + 
+class RecordEntry: + """ An entry in a record, for a specific content item. + """ + def __init__(self): + self.item_spec = None + self.errors = [] + + @property + def success(self): + return len(self.errors) == 0 + + def describe(self): + return {} + + def getAllOutputPaths(self): + return None + + def getAllErrors(self): + return self.errors + + +class Record: + """ A class that represents a 'record' of a bake operation on a + content source. + """ + def __init__(self, name): + self.name = name + self.deleted_out_paths = [] + self.user_data = {} + self.success = True + self._entries = {} + + @property + def entry_count(self): + return len(self._entries) + + def addEntry(self, entry): + if entry.item_spec in self._entries: + raise ValueError("Entry '%s' is already in the record." % + entry.item_spec) + self._entries[entry.item_spec] = entry + + def getEntries(self): + return self._entries.values() + + def getEntry(self, item_spec): + return self._entries.get(item_spec) + + +class MultiRecord: + """ A container that includes multiple `Record` instances -- one for + each content source that was baked. 
+ """ + RECORD_VERSION = 13 + + def __init__(self): + self.records = [] + self.success = True + self.bake_time = 0 + self.incremental_count = 0 + self.invalidated = False + self.stats = None + self._app_version = APP_VERSION + self._record_version = self.RECORD_VERSION + + def getRecord(self, record_name, auto_create=True): + for r in self.records: + if r.name == record_name: + return r + if not auto_create: + raise Exception("No such record: %s" % record_name) + record = Record(record_name) + self.records.append(record) + return record + + def save(self, path): + path_dir = os.path.dirname(path) + if not os.path.isdir(path_dir): + os.makedirs(path_dir, 0o755) + + with open(path, 'wb') as fp: + pickle.dump(self, fp, pickle.HIGHEST_PROTOCOL) + + @staticmethod + def load(path): + logger.debug("Loading bake records from: %s" % path) + with open(path, 'rb') as fp: + return pickle.load(fp) + + +def get_flag_descriptions(flags, flag_descriptions): + res = [] + for k, v in flag_descriptions.items(): + if flags & k: + res.append(v) + if res: + return ', '.join(res) + return 'none' + + +def _are_records_valid(multi_record): + return (multi_record._app_version == APP_VERSION and + multi_record._record_version == MultiRecord.RECORD_VERSION) + + +def load_records(path, raise_errors=False): + try: + multi_record = MultiRecord.load(path) + except FileNotFoundError: + if raise_errors: + raise + logger.debug("No existing records found at: %s" % path) + multi_record = None + except Exception as ex: + if raise_errors: + raise + logger.debug("Error loading records from: %s" % path) + logger.debug(ex) + logger.debug("Will use empty records.") + multi_record = None + + was_invalid = False + if multi_record is not None and not _are_records_valid(multi_record): + logger.debug( + "Records from '%s' have old version: %s/%s." 
% + (path, multi_record._app_version, multi_record._record_version)) + logger.debug("Will use empty records.") + multi_record = None + was_invalid = True + + if multi_record is None: + multi_record = MultiRecord() + multi_record.invalidated = was_invalid + + return multi_record + + +class RecordHistory: + def __init__(self, previous, current): + if previous is None or current is None: + raise ValueError() + + if previous.name != current.name: + raise Exception("The two records must have the same name! " + "Got '%s' and '%s'." % + (previous.name, current.name)) + + self._previous = previous + self._current = current + self._diffs = None + + @property + def name(self): + return self._current.name + + @property + def current(self): + return self._current + + @property + def previous(self): + return self._previous + + @property + def diffs(self): + if self._diffs is None: + raise Exception("This record history hasn't been built yet.") + return self._diffs.values() + + def getPreviousEntry(self, item_spec): + key = _build_diff_key(item_spec) + return self._diffs[key][0] + + def getCurrentEntry(self, item_spec): + key = _build_diff_key(item_spec) + return self._diffs[key][1] + + def build(self): + if self._diffs is not None: + raise Exception("This record history has already been built.") + + self._diffs = {} + if self._previous is not None: + for e in self._previous.getEntries(): + key = _build_diff_key(e.item_spec) + self._diffs[key] = (e, None) + + if self._current is not None: + for e in self._current.getEntries(): + key = _build_diff_key(e.item_spec) + diff = self._diffs.get(key) + if diff is None: + self._diffs[key] = (None, e) + elif diff[1] is None: + self._diffs[key] = (diff[0], e) + else: + raise Exception( + "A current record entry already exists for '%s' " + "(%s)" % (key, diff[1].item_spec)) + + def copy(self): + return RecordHistory(self._previous, self._current) + + +class MultiRecordHistory: + """ Tracks the differences between an 'old' and a 'new' record 
+ container. + """ + def __init__(self, previous, current): + if previous is None or current is None: + raise ValueError() + + self.previous = previous + self.current = current + self.histories = [] + self._linkHistories(previous, current) + + def getPreviousRecord(self, record_name, auto_create=True): + return self.previous.getRecord(record_name, auto_create=auto_create) + + def getCurrentRecord(self, record_name): + return self.current.getRecord(record_name) + + def getHistory(self, record_name): + for h in self.histories: + if h.name == record_name: + return h + + rh = RecordHistory( + Record(record_name), + Record(record_name)) + self.histories.append(rh) + self.previous.records.append(rh.previous) + self.current.records.append(rh.current) + return rh + + def _linkHistories(self, previous, current): + pairs = {} + if previous: + for r in previous.records: + pairs[r.name] = (r, None) + if current: + for r in current.records: + p = pairs.get(r.name, (None, None)) + if p[1] is not None: + raise Exception("Got several records named: %s" % r.name) + pairs[r.name] = (p[0], r) + + for name, pair in pairs.items(): + p, c = pair + if p is None: + p = Record(name) + previous.records.append(p) + if c is None: + c = Record(name) + current.records.append(c) + self.histories.append(RecordHistory(p, c)) + + +def _build_diff_key(item_spec): + return hashlib.md5(item_spec.encode('utf8')).hexdigest() + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/plugins/base.py --- a/piecrust/plugins/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/plugins/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -2,7 +2,6 @@ import sys import logging import importlib -import importlib.util logger = logging.getLogger(__name__) @@ -39,7 +38,7 @@ def getSources(self): return [] - def getPageGenerators(self): + def getPipelines(self): return [] def getPublishers(self): @@ -62,15 +61,15 @@ def getFormatters(self): return self._getPluginComponents( - 'getFormatters', - initialize=True, register_timer=True, - 
order_key=lambda f: f.priority) + 'getFormatters', + initialize=True, register_timer=True, + order_key=lambda f: f.priority) def getTemplateEngines(self): return self._getPluginComponents( - 'getTemplateEngines', - initialize=True, register_timer=True, - register_timer_suffixes=['_segment', '_layout']) + 'getTemplateEngines', + initialize=True, register_timer=True, + register_timer_suffixes=['_segment', '_layout']) def getTemplateEngineExtensions(self, engine_name): return self._getPluginComponents('getTemplateEngineExtensions', @@ -81,9 +80,9 @@ def getProcessors(self): return self._getPluginComponents( - 'getProcessors', - initialize=True, register_timer=True, - order_key=lambda p: p.priority) + 'getProcessors', + initialize=True, register_timer=True, + order_key=lambda p: p.priority) def getImporters(self): return self._getPluginComponents('getImporters') @@ -100,8 +99,8 @@ def getSources(self): return self._getPluginComponents('getSources') - def getPageGenerators(self): - return self._getPluginComponents('getPageGenerators') + def getPipelines(self): + return self._getPluginComponents('getPipelines') def getPublishers(self): return self._getPluginComponents('getPublishers') @@ -132,20 +131,27 @@ mod = None if mod is None: - # Import as a loose Python file from the plugins dirs. + # Import as a loose Python file from the plugins dir. for plugins_dir in self.app.plugins_dirs: pfile = os.path.join(plugins_dir, plugin_name + '.py') if os.path.isfile(pfile): - spec = importlib.util.spec_from_file_location(plugin_name, - pfile) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - sys.modules[mod_name] = mod - break + if sys.version_info[1] >= 5: + # Python 3.5+ + from importlib.util import (spec_from_file_location, + module_from_spec) + spec = spec_from_file_location(plugin_name, pfile) + mod = module_from_spec(spec) + spec.loader.exec_module(mod) + sys.modules[mod_name] = mod + else: + # Python 3.4, 3.3. 
+ from importlib.machinery import SourceFileLoader + mod = SourceFileLoader( + plugin_name, pfile).load_module() + sys.modules[mod_name] = mod if mod is None: logger.error("Failed to load plugin '%s'." % plugin_name) - logger.error("Looking in: %s" % self.app.plugins_dirs) return plugin_class = getattr(mod, '__piecrust_plugin__', None) @@ -174,6 +180,8 @@ all_components = [] for plugin in self.plugins: plugin_components = getattr(plugin, name)(*args) + # Make sure it's a list in case it was an iterator. + plugin_components = list(plugin_components) all_components += plugin_components if initialize: @@ -183,10 +191,11 @@ if register_timer: for comp in plugin_components: if not register_timer_suffixes: - self.app.env.registerTimer(comp.__class__.__name__) + self.app.env.stats.registerTimer( + comp.__class__.__name__) else: for s in register_timer_suffixes: - self.app.env.registerTimer( + self.app.env.stats.registerTimer( comp.__class__.__name__ + s) if order_key is not None: diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/plugins/builtin.py --- a/piecrust/plugins/builtin.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/plugins/builtin.py Tue Nov 21 22:07:12 2017 -0800 @@ -16,8 +16,8 @@ from piecrust.commands.builtin.plugins import PluginsCommand from piecrust.commands.builtin.publishing import PublishCommand from piecrust.commands.builtin.scaffolding import PrepareCommand - from piecrust.commands.builtin.serving import (ServeCommand) - from piecrust.commands.builtin.themes import (ThemesCommand) + from piecrust.commands.builtin.serving import ServeCommand + from piecrust.commands.builtin.themes import ThemesCommand from piecrust.commands.builtin.util import ( InitCommand, PurgeCommand, ImportCommand) @@ -53,36 +53,47 @@ DefaultPrepareTemplatesHelpTopic()] def getSources(self): - from piecrust.sources.default import DefaultPageSource + from piecrust.sources.autoconfig import ( + AutoConfigContentSource, OrderedContentSource) + from piecrust.sources.blogarchives import 
BlogArchivesSource + from piecrust.sources.default import DefaultContentSource + from piecrust.sources.fs import FSContentSource from piecrust.sources.posts import ( - FlatPostsSource, ShallowPostsSource, HierarchyPostsSource) - from piecrust.sources.autoconfig import ( - AutoConfigSource, OrderedPageSource) + FlatPostsSource, ShallowPostsSource, HierarchyPostsSource) from piecrust.sources.prose import ProseSource + from piecrust.sources.taxonomy import TaxonomySource return [ - DefaultPageSource, + AutoConfigContentSource, + BlogArchivesSource, + DefaultContentSource, + FSContentSource, FlatPostsSource, - ShallowPostsSource, HierarchyPostsSource, - AutoConfigSource, - OrderedPageSource, - ProseSource] + OrderedContentSource, + ProseSource, + ShallowPostsSource, + TaxonomySource] - def getPageGenerators(self): - from piecrust.generation.blogarchives import BlogArchivesPageGenerator - from piecrust.generation.taxonomy import TaxonomyPageGenerator + def getPipelines(self): + from piecrust.pipelines.page import PagePipeline + from piecrust.pipelines.asset import AssetPipeline + from piecrust.sources.taxonomy import TaxonomyPipeline + from piecrust.sources.blogarchives import BlogArchivesPipeline return [ - TaxonomyPageGenerator, - BlogArchivesPageGenerator] + PagePipeline, + AssetPipeline, + TaxonomyPipeline, + BlogArchivesPipeline] def getDataProviders(self): - from piecrust.data.provider import ( - IteratorDataProvider, BlogDataProvider) + from piecrust.dataproviders.pageiterator import \ + PageIteratorDataProvider + from piecrust.dataproviders.blog import BlogDataProvider return [ - IteratorDataProvider, + PageIteratorDataProvider, BlogDataProvider] def getTemplateEngines(self): @@ -107,10 +118,10 @@ TextileFormatter()] def getProcessors(self): - from piecrust.processing.base import CopyFileProcessor from piecrust.processing.compass import CompassProcessor from piecrust.processing.compressors import ( CleanCssProcessor, UglifyJSProcessor) + from 
piecrust.processing.copy import CopyFileProcessor from piecrust.processing.less import LessProcessor from piecrust.processing.pygments_style import PygmentsStyleProcessor from piecrust.processing.requirejs import RequireJSProcessor @@ -141,11 +152,13 @@ WordpressXmlImporter()] def getPublishers(self): + from piecrust.publishing.copy import CopyPublisher from piecrust.publishing.sftp import SftpPublisher from piecrust.publishing.shell import ShellCommandPublisher from piecrust.publishing.rsync import RsyncPublisher return [ + CopyPublisher, ShellCommandPublisher, SftpPublisher, RsyncPublisher] diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/base.py --- a/piecrust/processing/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/processing/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,4 +1,3 @@ -import shutil import os.path import logging @@ -11,26 +10,31 @@ PRIORITY_LAST = 1 -class PipelineContext(object): - def __init__(self, worker_id, app, out_dir, tmp_dir, force=None): - self.worker_id = worker_id - self.app = app - self.out_dir = out_dir - self.tmp_dir = tmp_dir - self.force = force - self.record = None - self._additional_ignore_patterns = [] +FORCE_BUILD = object() + + +class ProcessorContext: + def __init__(self, pipeline): + self.ignore_patterns = [] + self.extra_processors = [] + self._pipeline = pipeline + self._pipeline_ctx = pipeline.ctx @property - def is_first_worker(self): - return self.worker_id == 0 + def tmp_dir(self): + return self._pipeline.tmp_dir @property - def is_pipeline_process(self): - return self.worker_id < 0 + def out_dir(self): + return self._pipeline_ctx.out_dir - def addIgnorePatterns(self, patterns): - self._additional_ignore_patterns += patterns + @property + def worker_id(self): + return self._pipeline_ctx.worker_id + + @property + def is_main_process(self): + return self._pipeline_ctx.is_main_process class Processor(object): @@ -63,24 +67,12 @@ pass -class CopyFileProcessor(Processor): - PROCESSOR_NAME = 'copy' - - def 
__init__(self): - super(CopyFileProcessor, self).__init__() - self.priority = PRIORITY_LAST +class ExternalProcessException(Exception): + def __init__(self, stderr_data): + self.stderr_data = stderr_data - def matches(self, path): - return True - - def getOutputFilenames(self, filename): - return [filename] - - def process(self, path, out_dir): - out_path = os.path.join(out_dir, os.path.basename(path)) - logger.debug("Copying: %s -> %s" % (path, out_path)) - shutil.copyfile(path, out_path) - return True + def __str__(self): + return self.stderr_data class SimpleFileProcessor(Processor): @@ -109,12 +101,3 @@ def _doProcess(self, in_path, out_path): raise NotImplementedError() - -class ExternalProcessException(Exception): - def __init__(self, stderr_data): - self.stderr_data = stderr_data - - def __str__(self): - return self.stderr_data - - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/compass.py --- a/piecrust/processing/compass.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/processing/compass.py Tue Nov 21 22:07:12 2017 -0800 @@ -28,13 +28,13 @@ def initialize(self, app): super(CompassProcessor, self).initialize(app) - def onPipelineStart(self, pipeline): - super(CompassProcessor, self).onPipelineStart(pipeline) - self._maybeActivate(pipeline) + def onPipelineStart(self, ctx): + super(CompassProcessor, self).onPipelineStart(ctx) + self._maybeActivate(ctx) - def onPipelineEnd(self, pipeline): - super(CompassProcessor, self).onPipelineEnd(pipeline) - self._maybeRunCompass(pipeline) + def onPipelineEnd(self, ctx): + super(CompassProcessor, self).onPipelineEnd(ctx) + self._maybeRunCompass(ctx) def matches(self, path): if self._state != self.STATE_ACTIVE: @@ -62,7 +62,7 @@ "is done.") self._runInSite = True - def _maybeActivate(self, pipeline): + def _maybeActivate(self, ctx): if self._state != self.STATE_UNKNOWN: return @@ -95,17 +95,17 @@ if custom_args: self._args += ' ' + custom_args - out_dir = pipeline.out_dir - tmp_dir = 
os.path.join(pipeline.tmp_dir, 'compass') + out_dir = ctx.out_dir + tmp_dir = os.path.join(ctx.tmp_dir, 'compass') self._args = multi_replace( - self._args, - {'%out_dir%': out_dir, - '%tmp_dir%': tmp_dir}) + self._args, + {'%out_dir%': out_dir, + '%tmp_dir%': tmp_dir}) self._runInSite = False self._runInTheme = False - def _maybeRunCompass(self, pipeline): + def _maybeRunCompass(self, ctx): if self._state != self.STATE_ACTIVE: return diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/compressors.py --- a/piecrust/processing/compressors.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/processing/compressors.py Tue Nov 21 22:07:12 2017 -0800 @@ -17,7 +17,7 @@ self._conf = None def matches(self, path): - return path.endswith('.css') + return path.endswith('.css') and not path.endswith('.min.css') def getOutputFilenames(self, filename): self._ensureInitialized() @@ -73,6 +73,9 @@ super(UglifyJSProcessor, self).__init__({'js': 'js'}) self._conf = None + def matches(self, path): + return path.endswith('.js') and not path.endswith('.min.js') + def _doProcess(self, in_path, out_path): self._ensureInitialized() diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/copy.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/processing/copy.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,27 @@ +import os.path +import shutil +import logging +from piecrust.processing.base import Processor, PRIORITY_LAST + + +logger = logging.getLogger(__name__) + + +class CopyFileProcessor(Processor): + PROCESSOR_NAME = 'copy' + + def __init__(self): + super(CopyFileProcessor, self).__init__() + self.priority = PRIORITY_LAST + + def matches(self, path): + return True + + def getOutputFilenames(self, filename): + return [filename] + + def process(self, path, out_dir): + out_path = os.path.join(out_dir, os.path.basename(path)) + logger.debug("Copying: %s -> %s" % (path, out_path)) + shutil.copyfile(path, out_path) + return True diff -r 7a1903ede496 -r 2e5c5d33d62c 
piecrust/processing/less.py --- a/piecrust/processing/less.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/processing/less.py Tue Nov 21 22:07:12 2017 -0800 @@ -7,8 +7,7 @@ import platform import subprocess from piecrust.processing.base import ( - SimpleFileProcessor, ExternalProcessException) -from piecrust.processing.tree import FORCE_BUILD + SimpleFileProcessor, ExternalProcessException, FORCE_BUILD) logger = logging.getLogger(__name__) @@ -22,9 +21,9 @@ self._conf = None self._map_dir = None - def onPipelineStart(self, pipeline): - self._map_dir = os.path.join(pipeline.tmp_dir, 'less') - if (pipeline.is_first_worker and + def onPipelineStart(self, ctx): + self._map_dir = os.path.join(ctx.tmp_dir, 'less') + if (ctx.is_main_process and not os.path.isdir(self._map_dir)): os.makedirs(self._map_dir) @@ -59,7 +58,7 @@ map_path = self._getMapPath(in_path) map_url = '/' + os.path.relpath( - map_path, self.app.root_dir).replace('\\', '/') + map_path, self.app.root_dir).replace('\\', '/') # On Windows, it looks like LESSC is confused with paths when the # map file is not to be created in the same directory as the input @@ -67,8 +66,8 @@ # a mix of relative and absolute paths stuck together). # So create it there and move it afterwards... 
:( temp_map_path = os.path.join( - os.path.dirname(in_path), - os.path.basename(map_path)) + os.path.dirname(in_path), + os.path.basename(map_path)) args = [self._conf['bin'], '--source-map=%s' % temp_map_path, @@ -83,8 +82,8 @@ shell = (platform.system() == 'Windows') try: proc = subprocess.Popen( - args, shell=shell, - stderr=subprocess.PIPE) + args, shell=shell, + stderr=subprocess.PIPE) stdout_data, stderr_data = proc.communicate() except FileNotFoundError as ex: logger.error("Tried running LESS processor with command: %s" % @@ -93,7 +92,7 @@ "Did you install it?") from ex if proc.returncode != 0: raise ExternalProcessException( - stderr_data.decode(sys.stderr.encoding)) + stderr_data.decode(sys.stderr.encoding)) logger.debug("Moving map file: %s -> %s" % (temp_map_path, map_path)) if os.path.exists(map_path): @@ -115,8 +114,8 @@ def _getMapPath(self, path): map_name = "%s_%s.map" % ( - os.path.basename(path), - hashlib.md5(path.encode('utf8')).hexdigest()) + os.path.basename(path), + hashlib.md5(path.encode('utf8')).hexdigest()) map_path = os.path.join(self._map_dir, map_name) return map_path diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/pipeline.py --- a/piecrust/processing/pipeline.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,325 +0,0 @@ -import os -import os.path -import re -import time -import hashlib -import logging -import multiprocessing -from piecrust.chefutil import format_timed, format_timed_scope -from piecrust.environment import ExecutionStats -from piecrust.processing.base import PipelineContext -from piecrust.processing.records import ( - ProcessorPipelineRecordEntry, TransitionalProcessorPipelineRecord, - FLAG_PROCESSED) -from piecrust.processing.worker import ( - ProcessingWorkerJob, - get_filtered_processors) - - -logger = logging.getLogger(__name__) - - -class _ProcessingContext(object): - def __init__(self, jobs, record, base_dir, mount_info): - self.jobs = jobs - self.record = record - 
self.base_dir = base_dir - self.mount_info = mount_info - - -class ProcessorPipeline(object): - def __init__(self, app, out_dir, force=False, - applied_config_variant=None, - applied_config_values=None): - assert app and out_dir - self.app = app - self.out_dir = out_dir - self.force = force - self.applied_config_variant = applied_config_variant - self.applied_config_values = applied_config_values - - tmp_dir = app.cache_dir - if not tmp_dir: - import tempfile - tmp_dir = os.path.join(tempfile.gettempdir(), 'piecrust') - self.tmp_dir = os.path.join(tmp_dir, 'proc') - - baker_params = app.config.get('baker', {}) - - mount_params = baker_params.get('assets_dirs', {}) - self.mounts = make_mount_infos(app, mount_params) - - self.num_workers = baker_params.get( - 'workers', multiprocessing.cpu_count()) - - ignores = baker_params.get('ignore', []) - ignores += [ - '_cache', '_counter', - '.DS_Store', 'Thumbs.db', - '.git*', '.hg*', '.svn'] - self.ignore_patterns = make_re(ignores) - self.force_patterns = make_re(baker_params.get('force', [])) - - # Those things are mostly for unit-testing. - # - # Note that additiona processors can't be passed as instances. - # Instead, we need some factory functions because we need to create - # one instance right away to use during the initialization phase, and - # another instance to pass to the worker pool. The initialized one will - # be tied to the PieCrust app instance, which can't be pickled across - # processes. - self.enabled_processors = None - self.additional_processors_factories = None - - def addIgnorePatterns(self, patterns): - self.ignore_patterns += make_re(patterns) - - def run(self, src_dir_or_file=None, *, - delete=True, previous_record=None, save_record=True): - start_time = time.perf_counter() - - # Get the list of processors for this run. 
- processors = self.app.plugin_loader.getProcessors() - if self.enabled_processors is not None: - logger.debug("Filtering processors to: %s" % - self.enabled_processors) - processors = get_filtered_processors(processors, - self.enabled_processors) - if self.additional_processors_factories is not None: - logger.debug("Adding %s additional processors." % - len(self.additional_processors_factories)) - for proc_fac in self.additional_processors_factories: - proc = proc_fac() - self.app.env.registerTimer(proc.__class__.__name__, - raise_if_registered=False) - proc.initialize(self.app) - processors.append(proc) - - # Invoke pre-processors. - pipeline_ctx = PipelineContext(-1, self.app, self.out_dir, - self.tmp_dir, self.force) - for proc in processors: - proc.onPipelineStart(pipeline_ctx) - - # Pre-processors can define additional ignore patterns. - self.ignore_patterns += make_re( - pipeline_ctx._additional_ignore_patterns) - - # Create the pipeline record. - record = TransitionalProcessorPipelineRecord() - record_cache = self.app.cache.getCache('proc') - record_name = ( - hashlib.md5(self.out_dir.encode('utf8')).hexdigest() + - '.record') - if previous_record: - record.setPrevious(previous_record) - elif not self.force and record_cache.has(record_name): - with format_timed_scope(logger, 'loaded previous bake record', - level=logging.DEBUG, colored=False): - record.loadPrevious(record_cache.getCachePath(record_name)) - logger.debug("Got %d entries in process record." % - len(record.previous.entries)) - record.current.success = True - record.current.processed_count = 0 - - # Work! 
- def _handler(res): - entry = record.getCurrentEntry(res.path) - assert entry is not None - entry.flags = res.flags - entry.proc_tree = res.proc_tree - entry.rel_outputs = res.rel_outputs - if entry.flags & FLAG_PROCESSED: - record.current.processed_count += 1 - if res.errors: - entry.errors += res.errors - record.current.success = False - - rel_path = os.path.relpath(res.path, self.app.root_dir) - logger.error("Errors found in %s:" % rel_path) - for e in entry.errors: - logger.error(" " + e) - - jobs = [] - self._process(src_dir_or_file, record, jobs) - pool = self._createWorkerPool() - ar = pool.queueJobs(jobs, handler=_handler) - ar.wait() - - # Shutdown the workers and get timing information from them. - reports = pool.close() - total_stats = ExecutionStats() - record.current.stats['_Total'] = total_stats - for i in range(len(reports)): - worker_stats = reports[i]['data'] - if worker_stats is not None: - worker_name = 'PipelineWorker_%d' % i - record.current.stats[worker_name] = worker_stats - total_stats.mergeStats(worker_stats) - - # Invoke post-processors. - pipeline_ctx.record = record.current - for proc in processors: - proc.onPipelineEnd(pipeline_ctx) - - # Handle deletions. - if delete: - for path, reason in record.getDeletions(): - logger.debug("Removing '%s': %s" % (path, reason)) - record.current.deleted.append(path) - try: - os.remove(path) - except FileNotFoundError: - pass - logger.info('[delete] %s' % path) - - # Finalize the process record. - record.current.process_time = time.time() - record.current.out_dir = self.out_dir - record.collapseRecords() - - # Save the process record. - if save_record: - with format_timed_scope(logger, 'saved bake record', - level=logging.DEBUG, colored=False): - record.saveCurrent(record_cache.getCachePath(record_name)) - - logger.info(format_timed( - start_time, - "processed %d assets." 
% record.current.processed_count)) - - return record.detach() - - def _process(self, src_dir_or_file, record, jobs): - if src_dir_or_file is not None: - # Process only the given path. - # Find out what mount point this is in. - for path, info in self.mounts.items(): - if src_dir_or_file[:len(path)] == path: - base_dir = path - mount_info = info - break - else: - known_roots = list(self.mounts.keys()) - raise Exception("Input path '%s' is not part of any known " - "mount point: %s" % - (src_dir_or_file, known_roots)) - - ctx = _ProcessingContext(jobs, record, base_dir, mount_info) - logger.debug("Initiating processing pipeline on: %s" % - src_dir_or_file) - if os.path.isdir(src_dir_or_file): - self._processDirectory(ctx, src_dir_or_file) - elif os.path.isfile(src_dir_or_file): - self._processFile(ctx, src_dir_or_file) - - else: - # Process everything. - for path, info in self.mounts.items(): - ctx = _ProcessingContext(jobs, record, path, info) - logger.debug("Initiating processing pipeline on: %s" % path) - self._processDirectory(ctx, path) - - def _processDirectory(self, ctx, start_dir): - for dirpath, dirnames, filenames in os.walk(start_dir): - rel_dirpath = os.path.relpath(dirpath, start_dir) - dirnames[:] = [d for d in dirnames - if not re_matchany( - d, self.ignore_patterns, rel_dirpath)] - - for filename in filenames: - if re_matchany(filename, self.ignore_patterns, rel_dirpath): - continue - self._processFile(ctx, os.path.join(dirpath, filename)) - - def _processFile(self, ctx, path): - # TODO: handle overrides between mount-points. 
- - entry = ProcessorPipelineRecordEntry(path) - ctx.record.addEntry(entry) - - previous_entry = ctx.record.getPreviousEntry(path) - force_this = (self.force or previous_entry is None or - not previous_entry.was_processed_successfully) - - job = ProcessingWorkerJob(ctx.base_dir, ctx.mount_info, path, - force=force_this) - ctx.jobs.append(job) - - def _createWorkerPool(self): - from piecrust.app import PieCrustFactory - from piecrust.workerpool import WorkerPool - from piecrust.processing.worker import ( - ProcessingWorkerContext, ProcessingWorker) - - appfactory = PieCrustFactory( - self.app.root_dir, - cache=self.app.cache.enabled, - cache_key=self.app.cache_key, - config_variant=self.applied_config_variant, - config_values=self.applied_config_values, - debug=self.app.debug, - theme_site=self.app.theme_site) - - ctx = ProcessingWorkerContext( - appfactory, - self.out_dir, self.tmp_dir, - force=self.force) - ctx.enabled_processors = self.enabled_processors - if self.additional_processors_factories is not None: - ctx.additional_processors = [ - proc_fac() - for proc_fac in self.additional_processors_factories] - - pool = WorkerPool( - worker_class=ProcessingWorker, - initargs=(ctx,)) - return pool - - -def make_mount_infos(app, mount_params): - mounts = {d: {} for d in app.assets_dirs} - - for name, cfg in mount_params.items(): - mdir = os.path.join(app.root_dir, name) - mounts[mdir] = cfg - - for mdir, info in mounts.items(): - mname = os.path.basename(mdir) - info_from_config = mount_params.get(mname) - if info_from_config is not None: - if not isinstance(info, dict): - raise Exception("Asset directory info for '%s' is not a " - "dictionary." 
% mname) - info.update(info_from_config) - info.setdefault('processors', 'all -uglifyjs -cleancss') - info['name'] = mname - - return mounts - - -def make_re(patterns): - re_patterns = [] - for pat in patterns: - if pat[0] == '/' and pat[-1] == '/' and len(pat) > 2: - re_patterns.append(pat[1:-1]) - else: - escaped_pat = ( - re.escape(pat) - .replace(r'\*', r'[^/\\]*') - .replace(r'\?', r'[^/\\]')) - re_patterns.append(escaped_pat) - return [re.compile(p) for p in re_patterns] - - -def re_matchany(filename, patterns, dirname=None): - if dirname and dirname != '.': - filename = os.path.join(dirname, filename) - - # skip patterns use a forward slash regardless of the platform. - filename = filename.replace('\\', '/') - for pattern in patterns: - if pattern.search(filename): - return True - return False - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/records.py --- a/piecrust/processing/records.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,100 +0,0 @@ -import os.path -import hashlib -from piecrust.records import Record, TransitionalRecord - - -class ProcessorPipelineRecord(Record): - RECORD_VERSION = 7 - - def __init__(self): - super(ProcessorPipelineRecord, self).__init__() - self.out_dir = None - self.process_time = None - self.processed_count = 0 - self.deleted = [] - self.success = False - - -FLAG_NONE = 0 -FLAG_PREPARED = 2**0 -FLAG_PROCESSED = 2**1 -FLAG_BYPASSED_STRUCTURED_PROCESSING = 2**3 -FLAG_COLLAPSED_FROM_LAST_RUN = 2**4 - - -def _get_transition_key(path): - return hashlib.md5(path.encode('utf8')).hexdigest() - - -class ProcessorPipelineRecordEntry(object): - def __init__(self, path): - self.path = path - - self.flags = FLAG_NONE - self.rel_outputs = [] - self.proc_tree = None - self.errors = [] - - @property - def was_prepared(self): - return bool(self.flags & FLAG_PREPARED) - - @property - def was_processed(self): - return (self.was_prepared and - (bool(self.flags & FLAG_PROCESSED) or len(self.errors) > 
0)) - - @property - def was_processed_successfully(self): - return self.was_processed and not self.errors - - @property - def was_collapsed_from_last_run(self): - return self.flags & FLAG_COLLAPSED_FROM_LAST_RUN - - -class TransitionalProcessorPipelineRecord(TransitionalRecord): - def __init__(self, previous_path=None): - super(TransitionalProcessorPipelineRecord, self).__init__( - ProcessorPipelineRecord, previous_path) - - def getTransitionKey(self, entry): - return _get_transition_key(entry.path) - - def getCurrentEntry(self, path): - key = _get_transition_key(path) - pair = self.transitions.get(key) - if pair is not None: - return pair[1] - return None - - def getPreviousEntry(self, path): - key = _get_transition_key(path) - pair = self.transitions.get(key) - if pair is not None: - return pair[0] - return None - - def collapseRecords(self): - for prev, cur in self.transitions.values(): - if prev and cur and not cur.was_processed: - # This asset wasn't processed, so the information from - # last time is still valid. 
- cur.flags = (prev.flags - & ~FLAG_PROCESSED - | FLAG_COLLAPSED_FROM_LAST_RUN) - cur.rel_outputs = list(prev.rel_outputs) - cur.errors = list(prev.errors) - - def getDeletions(self): - for prev, cur in self.transitions.values(): - if prev and not cur: - for p in prev.rel_outputs: - abs_p = os.path.join(self.previous.out_dir, p) - yield (abs_p, 'previous asset was removed') - elif prev and cur and cur.was_processed_successfully: - diff = set(prev.rel_outputs) - set(cur.rel_outputs) - for p in diff: - abs_p = os.path.join(self.previous.out_dir, p) - yield (abs_p, 'asset changed outputs') - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/requirejs.py --- a/piecrust/processing/requirejs.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/processing/requirejs.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,12 +1,9 @@ import os import os.path -import json -import hashlib import logging import platform import subprocess -from piecrust.processing.base import Processor, PRIORITY_FIRST -from piecrust.processing.tree import FORCE_BUILD +from piecrust.processing.base import Processor, PRIORITY_FIRST, FORCE_BUILD logger = logging.getLogger(__name__) @@ -33,15 +30,15 @@ self._conf.setdefault('bin', 'r.js') self._conf.setdefault('out_path', self._conf['build_path']) - def onPipelineStart(self, pipeline): - super(RequireJSProcessor, self).onPipelineStart(pipeline) + def onPipelineStart(self, ctx): + super(RequireJSProcessor, self).onPipelineStart(ctx) if self._conf is None: return logger.debug("Adding Javascript suppressor to build pipeline.") skip = _JavascriptSkipProcessor(self._conf['build_path']) - pipeline.processors.append(skip) + ctx.extra_processors.append(skip) def matches(self, path): if self._conf is None: diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/sass.py --- a/piecrust/processing/sass.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/processing/sass.py Tue Nov 21 22:07:12 2017 -0800 @@ -5,8 +5,7 @@ import logging import platform import subprocess -from 
piecrust.processing.base import SimpleFileProcessor -from piecrust.processing.tree import FORCE_BUILD +from piecrust.processing.base import SimpleFileProcessor, FORCE_BUILD logger = logging.getLogger(__name__) @@ -17,23 +16,23 @@ def __init__(self): super(SassProcessor, self).__init__( - extensions={'scss': 'css', 'sass': 'css'}) + extensions={'scss': 'css', 'sass': 'css'}) self._conf = None self._map_dir = None def initialize(self, app): super(SassProcessor, self).initialize(app) - def onPipelineStart(self, pipeline): - super(SassProcessor, self).onPipelineStart(pipeline) + def onPipelineStart(self, ctx): + super(SassProcessor, self).onPipelineStart(ctx) - self._map_dir = os.path.join(pipeline.tmp_dir, 'sass') - if pipeline.is_first_worker: + self._map_dir = os.path.join(ctx.tmp_dir, 'sass') + if ctx.is_main_process: if not os.path.isdir(self._map_dir): os.makedirs(self._map_dir) # Ignore include-only Sass files. - pipeline.addIgnorePatterns(['_*.scss', '_*.sass']) + ctx.ignore_patterns += ['_*.scss', '_*.sass'] def getDependencies(self, path): if _is_include_only(path): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/sitemap.py --- a/piecrust/processing/sitemap.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/processing/sitemap.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,9 +1,10 @@ +import os +import os.path import time import logging import yaml -from piecrust.data.iterators import PageIterator +from piecrust.dataproviders.pageiterator import PageIterator from piecrust.processing.base import SimpleFileProcessor -from piecrust.routing import create_route_metadata logger = logging.getLogger(__name__) @@ -15,12 +16,12 @@ """ SITEMAP_FOOTER = "\n" -SITEURL_HEADER = " \n" -SITEURL_LOC = " %s\n" -SITEURL_LASTMOD = " %s\n" -SITEURL_CHANGEFREQ = " %s\n" -SITEURL_PRIORITY = " %0.1f\n" -SITEURL_FOOTER = " \n" +SITEURL_HEADER = " \n" # NOQA: E222 +SITEURL_LOC = " %s\n" # NOQA: E222 +SITEURL_LASTMOD = " %s\n" # NOQA: E222 +SITEURL_CHANGEFREQ = " %s\n" # NOQA: E222 
+SITEURL_PRIORITY = " %0.1f\n" # NOQA: E222 +SITEURL_FOOTER = " \n" # NOQA: E222 class SitemapProcessor(SimpleFileProcessor): @@ -30,18 +31,26 @@ super(SitemapProcessor, self).__init__({'sitemap': 'xml'}) self._start_time = None - def onPipelineStart(self, pipeline): + def onPipelineStart(self, ctx): self._start_time = time.time() def _doProcess(self, in_path, out_path): with open(in_path, 'r') as fp: sitemap = yaml.load(fp) - with open(out_path, 'w') as fp: - fp.write(SITEMAP_HEADER) - self._writeManualLocs(sitemap, fp) - self._writeAutoLocs(sitemap, fp) - fp.write(SITEMAP_FOOTER) + try: + with open(out_path, 'w') as fp: + fp.write(SITEMAP_HEADER) + self._writeManualLocs(sitemap, fp) + self._writeAutoLocs(sitemap, fp) + fp.write(SITEMAP_FOOTER) + except: + # If an exception occurs, delete the output file otherwise + # the pipeline will think the output was correctly produced. + if os.path.isfile(out_path): + logger.debug("Error occured, removing output sitemap.") + os.unlink(out_path) + raise return True diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/tree.py --- a/piecrust/processing/tree.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,299 +0,0 @@ -import os -import time -import os.path -import logging -from piecrust.chefutil import format_timed - - -logger = logging.getLogger(__name__) - - -STATE_UNKNOWN = 0 -STATE_DIRTY = 1 -STATE_CLEAN = 2 - - -FORCE_BUILD = object() - - -class ProcessingTreeError(Exception): - pass - - -class ProcessorNotFoundError(ProcessingTreeError): - pass - - -class ProcessorError(ProcessingTreeError): - def __init__(self, proc_name, in_path, *args): - super(ProcessorError, self).__init__(*args) - self.proc_name = proc_name - self.in_path = in_path - - def __str__(self): - return "Processor %s failed on: %s" % (self.proc_name, self.in_path) - - -class ProcessingTreeNode(object): - def __init__(self, path, available_procs, level=0): - self.path = path - self.available_procs = available_procs - 
self.outputs = [] - self.level = level - self.state = STATE_UNKNOWN - self._processor = None - - def getProcessor(self): - if self._processor is None: - for p in self.available_procs: - if p.matches(self.path): - self._processor = p - self.available_procs.remove(p) - break - else: - raise ProcessorNotFoundError() - return self._processor - - def setState(self, state, recursive=True): - self.state = state - if recursive: - for o in self.outputs: - o.setState(state, True) - - @property - def is_leaf(self): - return len(self.outputs) == 0 - - def getLeaves(self): - if self.is_leaf: - return [self] - leaves = [] - for o in self.outputs: - for l in o.getLeaves(): - leaves.append(l) - return leaves - - -class ProcessingTreeBuilder(object): - def __init__(self, processors): - self.processors = processors - - def build(self, path): - tree_root = ProcessingTreeNode(path, list(self.processors)) - - loop_guard = 100 - walk_stack = [tree_root] - while len(walk_stack) > 0: - loop_guard -= 1 - if loop_guard <= 0: - raise ProcessingTreeError("Infinite loop detected!") - - cur_node = walk_stack.pop() - proc = cur_node.getProcessor() - - # If the root tree node (and only that one) wants to bypass this - # whole tree business, so be it. - if proc.is_bypassing_structured_processing: - if cur_node != tree_root: - raise ProcessingTreeError("Only root processors can " - "bypass structured processing.") - break - - # Get the destination directory and output files. 
- rel_dir, basename = os.path.split(cur_node.path) - out_names = proc.getOutputFilenames(basename) - if out_names is None: - continue - - for n in out_names: - out_node = ProcessingTreeNode( - os.path.join(rel_dir, n), - list(cur_node.available_procs), - cur_node.level + 1) - cur_node.outputs.append(out_node) - - if proc.PROCESSOR_NAME != 'copy': - walk_stack.append(out_node) - - return tree_root - - -class ProcessingTreeRunner(object): - def __init__(self, base_dir, tmp_dir, out_dir): - self.base_dir = base_dir - self.tmp_dir = tmp_dir - self.out_dir = out_dir - - def processSubTree(self, tree_root): - did_process = False - walk_stack = [tree_root] - while len(walk_stack) > 0: - cur_node = walk_stack.pop() - - self._computeNodeState(cur_node) - if cur_node.state == STATE_DIRTY: - did_process_this_node = self.processNode(cur_node) - did_process |= did_process_this_node - - if did_process_this_node: - for o in cur_node.outputs: - if not o.is_leaf: - walk_stack.append(o) - else: - for o in cur_node.outputs: - if not o.is_leaf: - walk_stack.append(o) - return did_process - - def processNode(self, node): - full_path = self._getNodePath(node) - proc = node.getProcessor() - if proc.is_bypassing_structured_processing: - try: - start_time = time.perf_counter() - with proc.app.env.timerScope(proc.__class__.__name__): - proc.process(full_path, self.out_dir) - print_node( - node, - format_timed( - start_time, "(bypassing structured processing)", - colored=False)) - return True - except Exception as e: - raise ProcessorError(proc.PROCESSOR_NAME, full_path) from e - - # All outputs of a node must go to the same directory, so we can get - # the output directory off of the first output. 
- base_out_dir = self._getNodeBaseDir(node.outputs[0]) - rel_out_dir = os.path.dirname(node.path) - out_dir = os.path.join(base_out_dir, rel_out_dir) - if not os.path.isdir(out_dir): - try: - os.makedirs(out_dir, 0o755, exist_ok=True) - except OSError: - pass - - try: - start_time = time.perf_counter() - with proc.app.env.timerScope(proc.__class__.__name__): - proc_res = proc.process(full_path, out_dir) - if proc_res is None: - raise Exception("Processor '%s' didn't return a boolean " - "result value." % proc) - if proc_res: - print_node(node, "-> %s" % out_dir) - return True - else: - print_node(node, "-> %s [clean]" % out_dir) - return False - except Exception as e: - raise ProcessorError(proc.PROCESSOR_NAME, full_path) from e - - def _computeNodeState(self, node): - if node.state != STATE_UNKNOWN: - return - - proc = node.getProcessor() - if (proc.is_bypassing_structured_processing or - not proc.is_delegating_dependency_check): - # This processor wants to handle things on its own... - node.setState(STATE_DIRTY, False) - return - - start_time = time.perf_counter() - - # Get paths and modification times for the input path and - # all dependencies (if any). - base_dir = self._getNodeBaseDir(node) - full_path = os.path.join(base_dir, node.path) - in_mtime = (full_path, os.path.getmtime(full_path)) - force_build = False - try: - deps = proc.getDependencies(full_path) - if deps == FORCE_BUILD: - force_build = True - elif deps is not None: - for dep in deps: - dep_mtime = os.path.getmtime(dep) - if dep_mtime > in_mtime[1]: - in_mtime = (dep, dep_mtime) - except Exception as e: - logger.warning("%s -- Will force-bake: %s" % (e, node.path)) - node.setState(STATE_DIRTY, True) - return - - if force_build: - # Just do what the processor told us to do. - node.setState(STATE_DIRTY, True) - message = "Processor requested a forced build." - print_node(node, message) - else: - # Get paths and modification times for the outputs. 
- message = None - for o in node.outputs: - full_out_path = self._getNodePath(o) - if not os.path.isfile(full_out_path): - message = "Output '%s' doesn't exist." % o.path - break - o_mtime = os.path.getmtime(full_out_path) - if o_mtime < in_mtime[1]: - message = "Input '%s' is newer than output '%s'." % ( - in_mtime[0], o.path) - break - if message is not None: - node.setState(STATE_DIRTY, True) - message += " Re-processing sub-tree." - print_node(node, message) - else: - node.setState(STATE_CLEAN, False) - - if node.state == STATE_DIRTY: - state = "dirty" - elif node.state == STATE_CLEAN: - state = "clean" - else: - state = "unknown" - logger.debug(format_timed(start_time, - "Computed node dirtyness: %s" % state, - indent_level=node.level, colored=False)) - - def _getNodeBaseDir(self, node): - if node.level == 0: - return self.base_dir - if node.is_leaf: - return self.out_dir - return os.path.join(self.tmp_dir, str(node.level)) - - def _getNodePath(self, node): - base_dir = self._getNodeBaseDir(node) - return os.path.join(base_dir, node.path) - - -def print_node(node, message=None, recursive=False): - indent = ' ' * node.level - try: - proc_name = node.getProcessor().PROCESSOR_NAME - except ProcessorNotFoundError: - proc_name = 'n/a' - - message = message or '' - logger.debug('%s%s [%s] %s' % (indent, node.path, proc_name, message)) - - if recursive: - for o in node.outputs: - print_node(o, None, True) - - -def get_node_name_tree(node): - try: - proc_name = node.getProcessor().PROCESSOR_NAME - except ProcessorNotFoundError: - proc_name = 'n/a' - - children = [] - for o in node.outputs: - if not o.outputs: - continue - children.append(get_node_name_tree(o)) - return (proc_name, children) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/processing/worker.py --- a/piecrust/processing/worker.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,185 +0,0 @@ -import re -import os.path -import time -import logging -from piecrust.app import 
PieCrust, apply_variant_and_values -from piecrust.processing.base import PipelineContext -from piecrust.processing.records import ( - FLAG_NONE, FLAG_PREPARED, FLAG_PROCESSED, - FLAG_BYPASSED_STRUCTURED_PROCESSING) -from piecrust.processing.tree import ( - ProcessingTreeBuilder, ProcessingTreeRunner, - ProcessingTreeError, ProcessorError, - get_node_name_tree, print_node, - STATE_DIRTY) -from piecrust.workerpool import IWorker - - -logger = logging.getLogger(__name__) - - -split_processor_names_re = re.compile(r'[ ,]+') -re_ansicolors = re.compile('\033\\[\d+m') - - -class ProcessingWorkerContext(object): - def __init__(self, appfactory, out_dir, tmp_dir, *, - force=False): - self.appfactory = appfactory - self.out_dir = out_dir - self.tmp_dir = tmp_dir - self.force = force - self.is_profiling = False - self.enabled_processors = None - self.additional_processors = None - - -class ProcessingWorkerJob(object): - def __init__(self, base_dir, mount_info, path, *, force=False): - self.base_dir = base_dir - self.mount_info = mount_info - self.path = path - self.force = force - - -class ProcessingWorkerResult(object): - def __init__(self, path): - self.path = path - self.flags = FLAG_NONE - self.proc_tree = None - self.rel_outputs = None - self.errors = None - - -class ProcessingWorker(IWorker): - def __init__(self, ctx): - self.ctx = ctx - self.work_start_time = time.perf_counter() - - def initialize(self): - # Create the app local to this worker. 
- app = self.ctx.appfactory.create() - app.env.registerTimer("PipelineWorker_%d_Total" % self.wid) - app.env.registerTimer("PipelineWorkerInit") - app.env.registerTimer("JobReceive") - app.env.registerTimer('BuildProcessingTree') - app.env.registerTimer('RunProcessingTree') - self.app = app - - processors = app.plugin_loader.getProcessors() - if self.ctx.enabled_processors: - logger.debug("Filtering processors to: %s" % - self.ctx.enabled_processors) - processors = get_filtered_processors(processors, - self.ctx.enabled_processors) - if self.ctx.additional_processors: - logger.debug("Adding %s additional processors." % - len(self.ctx.additional_processors)) - for proc in self.ctx.additional_processors: - app.env.registerTimer(proc.__class__.__name__) - proc.initialize(app) - processors.append(proc) - self.processors = processors - - # Invoke pre-processors. - pipeline_ctx = PipelineContext(self.wid, self.app, self.ctx.out_dir, - self.ctx.tmp_dir, self.ctx.force) - for proc in processors: - proc.onPipelineStart(pipeline_ctx) - - # Sort our processors again in case the pre-process step involved - # patching the processors with some new ones. - processors.sort(key=lambda p: p.priority) - - app.env.stepTimerSince("PipelineWorkerInit", self.work_start_time) - - def process(self, job): - result = ProcessingWorkerResult(job.path) - - processors = get_filtered_processors( - self.processors, job.mount_info['processors']) - - # Build the processing tree for this job. - rel_path = os.path.relpath(job.path, job.base_dir) - try: - with self.app.env.timerScope('BuildProcessingTree'): - builder = ProcessingTreeBuilder(processors) - tree_root = builder.build(rel_path) - result.flags |= FLAG_PREPARED - except ProcessingTreeError as ex: - result.errors = _get_errors(ex) - return result - - # Prepare and run the tree. 
- print_node(tree_root, recursive=True) - leaves = tree_root.getLeaves() - result.rel_outputs = [l.path for l in leaves] - result.proc_tree = get_node_name_tree(tree_root) - if tree_root.getProcessor().is_bypassing_structured_processing: - result.flags |= FLAG_BYPASSED_STRUCTURED_PROCESSING - - if job.force: - tree_root.setState(STATE_DIRTY, True) - - try: - with self.app.env.timerScope('RunProcessingTree'): - runner = ProcessingTreeRunner( - job.base_dir, self.ctx.tmp_dir, self.ctx.out_dir) - if runner.processSubTree(tree_root): - result.flags |= FLAG_PROCESSED - except ProcessingTreeError as ex: - if isinstance(ex, ProcessorError): - ex = ex.__cause__ - # Need to strip out colored errors from external processes. - result.errors = _get_errors(ex, strip_colors=True) - - return result - - def getReport(self, pool_reports): - # Invoke post-processors. - pipeline_ctx = PipelineContext(self.wid, self.app, self.ctx.out_dir, - self.ctx.tmp_dir, self.ctx.force) - for proc in self.processors: - proc.onPipelineEnd(pipeline_ctx) - - self.app.env.stepTimerSince("PipelineWorker_%d_Total" % self.wid, - self.work_start_time) - data = self.app.env.getStats() - data.timers.update(pool_reports) - return { - 'type': 'stats', - 'data': data} - - -def get_filtered_processors(processors, authorized_names): - if not authorized_names or authorized_names == 'all': - return processors - - if isinstance(authorized_names, str): - authorized_names = split_processor_names_re.split(authorized_names) - - procs = [] - has_star = 'all' in authorized_names - for p in processors: - for name in authorized_names: - if name == p.PROCESSOR_NAME: - procs.append(p) - break - if name == ('-%s' % p.PROCESSOR_NAME): - break - else: - if has_star: - procs.append(p) - return procs - - -def _get_errors(ex, strip_colors=False): - errors = [] - while ex is not None: - msg = str(ex) - if strip_colors: - msg = re_ansicolors.sub('', msg) - errors.append(msg) - ex = ex.__cause__ - return errors - diff -r 7a1903ede496 
-r 2e5c5d33d62c piecrust/publishing/base.py --- a/piecrust/publishing/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/publishing/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,10 +1,7 @@ import os.path -import shlex -import urllib.parse +import time import logging -import threading -import subprocess -from piecrust.configuration import try_get_dict_value +from piecrust.chefutil import format_timed logger = logging.getLogger(__name__) @@ -18,17 +15,17 @@ pass -class PublishingContext(object): +class PublishingContext: def __init__(self): self.bake_out_dir = None - self.bake_record = None + self.bake_records = None self.processing_record = None self.was_baked = False self.preview = False self.args = None -class Publisher(object): +class Publisher: PUBLISHER_NAME = 'undefined' PUBLISHER_SCHEME = None @@ -36,90 +33,167 @@ self.app = app self.target = target self.config = config - self.has_url_config = isinstance(config, urllib.parse.ParseResult) self.log_file_path = None def setupPublishParser(self, parser, app): return - def getConfigValue(self, name, default_value=None): - if self.has_url_config: - raise Exception("This publisher only has a URL configuration.") - return try_get_dict_value(self.config, name, default=default_value) + def parseUrlTarget(self, url): + raise NotImplementedError() def run(self, ctx): raise NotImplementedError() def getBakedFiles(self, ctx): - for e in ctx.bake_record.entries: - for sub in e.subs: - if sub.was_baked: - yield sub.out_path - for e in ctx.processing_record.entries: - if e.was_processed: - yield from [os.path.join(ctx.processing_record.out_dir, p) - for p in e.rel_outputs] + for rec in ctx.bake_records.records: + for e in rec.getEntries(): + paths = e.getAllOutputPaths() + if paths is not None: + yield from paths def getDeletedFiles(self, ctx): - yield from ctx.bake_record.deleted - yield from ctx.processing_record.deleted + for rec in ctx.bake_records.records: + yield from rec.deleted_out_paths + + +class 
InvalidPublishTargetError(Exception): + pass + + +class PublishingError(Exception): + pass -class ShellCommandPublisherBase(Publisher): - def __init__(self, app, target, config): - super(ShellCommandPublisherBase, self).__init__(app, target, config) - self.expand_user_args = True +class PublishingManager: + def __init__(self, appfactory, app): + self.appfactory = appfactory + self.app = app + + def run(self, target, + force=False, preview=False, extra_args=None, + log_file=None, log_debug_info=False, append_log_file=False): + start_time = time.perf_counter() + + # Get publisher for this target. + pub = self.app.getPublisher(target) + if pub is None: + raise InvalidPublishTargetError( + "No such publish target: %s" % target) + + # Will we need to bake first? + bake_first = pub.config.get('bake', True) - def run(self, ctx): - args = self._getCommandArgs(ctx) - if self.expand_user_args: - args = [os.path.expanduser(i) for i in args] + # Setup logging stuff. + hdlr = None + root_logger = logging.getLogger() + if log_file and not preview: + logger.debug("Adding file handler for: %s" % log_file) + mode = 'w' + if append_log_file: + mode = 'a' + hdlr = logging.FileHandler(log_file, mode=mode, encoding='utf8') + root_logger.addHandler(hdlr) + + if log_debug_info: + _log_debug_info(target, force, preview, extra_args) - if ctx.preview: - preview_args = ' '.join([shlex.quote(i) for i in args]) - logger.info( - "Would run shell command: %s" % preview_args) - return True + if not preview: + logger.info("Deploying to %s" % target) + else: + logger.info("Previewing deployment to %s" % target) - logger.debug( - "Running shell command: %s" % args) + # Bake first is necessary. 
+ records = None + was_baked = False + bake_out_dir = os.path.join(self.app.root_dir, '_pub', target) + if bake_first: + if not preview: + bake_start_time = time.perf_counter() + logger.debug("Baking first to: %s" % bake_out_dir) - proc = subprocess.Popen( - args, cwd=self.app.root_dir, bufsize=0, - stdout=subprocess.PIPE) + from piecrust.baking.baker import Baker + baker = Baker( + self.appfactory, self.app, bake_out_dir, force=force) + records = baker.bake() + was_baked = True - logger.debug("Running publishing monitor for PID %d" % proc.pid) - thread = _PublishThread(proc) - thread.start() - proc.wait() - thread.join() + if not records.success: + raise Exception( + "Error during baking, aborting publishing.") + logger.info(format_timed(bake_start_time, "Baked website.")) + else: + logger.info("Would bake to: %s" % bake_out_dir) + + # Publish! + logger.debug( + "Running publish target '%s' with publisher: %s" % + (target, pub.PUBLISHER_NAME)) + pub_start_time = time.perf_counter() - if proc.returncode != 0: - logger.error( - "Publish process returned code %d" % proc.returncode) + success = False + ctx = PublishingContext() + ctx.bake_out_dir = bake_out_dir + ctx.bake_records = records + ctx.was_baked = was_baked + ctx.preview = preview + ctx.args = extra_args + try: + success = pub.run(ctx) + except Exception as ex: + raise PublishingError( + "Error publishing to target: %s" % target) from ex + finally: + if hdlr: + root_logger.removeHandler(hdlr) + hdlr.close() + + logger.info(format_timed( + pub_start_time, "Ran publisher %s" % pub.PUBLISHER_NAME)) + + if success: + logger.info(format_timed(start_time, 'Deployed to %s' % target)) + return 0 else: - logger.debug("Publish process returned successfully.") - - return proc.returncode == 0 - - def _getCommandArgs(self, ctx): - raise NotImplementedError() + logger.error(format_timed(start_time, 'Failed to deploy to %s' % + target)) + return 1 -class _PublishThread(threading.Thread): - def __init__(self, proc): - 
super(_PublishThread, self).__init__( - name='publish_monitor', daemon=True) - self.proc = proc - self.root_logger = logging.getLogger() +def find_publisher_class(app, name, is_scheme=False): + attr_name = 'PUBLISHER_SCHEME' if is_scheme else 'PUBLISHER_NAME' + for pub_cls in app.plugin_loader.getPublishers(): + pub_sch = getattr(pub_cls, attr_name, None) + if pub_sch == name: + return pub_cls + return None + + +def find_publisher_name(app, scheme): + pub_cls = find_publisher_class(app, scheme, True) + if pub_cls: + return pub_cls.PUBLISHER_NAME + return None + - def run(self): - for line in iter(self.proc.stdout.readline, b''): - line_str = line.decode('utf8') - logger.info(line_str.rstrip('\r\n')) - for h in self.root_logger.handlers: - h.flush() +def _log_debug_info(target, force, preview, extra_args): + import os + import sys - self.proc.communicate() - logger.debug("Publish monitor exiting.") + logger.info("---- DEBUG INFO START ----") + logger.info("System:") + logger.info(" sys.argv=%s" % sys.argv) + logger.info(" sys.base_exec_prefix=%s" % sys.base_exec_prefix) + logger.info(" sys.base_prefix=%s" % sys.base_prefix) + logger.info(" sys.exec_prefix=%s" % sys.exec_prefix) + logger.info(" sys.executable=%s" % sys.executable) + logger.info(" sys.path=%s" % sys.path) + logger.info(" sys.platform=%s" % sys.platform) + logger.info(" sys.prefix=%s" % sys.prefix) + logger.info("Environment:") + logger.info(" cwd=%s" % os.getcwd()) + logger.info(" pid=%s" % os.getpid()) + logger.info("Variables:") + for k, v in os.environ.items(): + logger.info(" %s=%s" % (k, v)) + logger.info("---- DEBUG INFO END ----") diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/publishing/copy.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/publishing/copy.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,51 @@ +import os +import os.path +import shutil +import logging +from piecrust.publishing.base import Publisher + + +logger = logging.getLogger(__name__) + + +class 
CopyPublisher(Publisher): + PUBLISHER_NAME = 'copy' + PUBLISHER_SCHEME = 'file' + + def parseUrlTarget(self, url): + self.config = {'output': (url.netloc + url.path)} + + def run(self, ctx): + dest = self.config.get('output') + + if ctx.was_baked: + to_upload = list(self.getBakedFiles(ctx)) + to_delete = list(self.getDeletedFiles(ctx)) + if to_upload or to_delete: + logger.info("Copying new/changed files...") + for path in to_upload: + rel_path = os.path.relpath(path, ctx.bake_out_dir) + dest_path = os.path.join(dest, rel_path) + dest_dir = os.path.dirname(dest_path) + os.makedirs(dest_dir, exist_ok=True) + try: + dest_mtime = os.path.getmtime(dest_path) + except OSError: + dest_mtime = 0 + if os.path.getmtime(path) >= dest_mtime: + logger.info(rel_path) + if not ctx.preview: + shutil.copyfile(path, dest_path) + + logger.info("Deleting removed files...") + for path in self.getDeletedFiles(ctx): + rel_path = os.path.relpath(path, ctx.bake_out_dir) + logger.info("%s [DELETE]" % rel_path) + if not ctx.preview: + try: + os.remove(path) + except OSError: + pass + else: + logger.info("Nothing to copy to the output folder.") + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/publishing/publisher.py --- a/piecrust/publishing/publisher.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,128 +0,0 @@ -import os.path -import time -import logging -import urllib.parse -from piecrust.chefutil import format_timed -from piecrust.publishing.base import PublishingContext - - -logger = logging.getLogger(__name__) - - -class InvalidPublishTargetError(Exception): - pass - - -class PublishingError(Exception): - pass - - -class Publisher(object): - def __init__(self, app): - self.app = app - - def run(self, target, - force=False, preview=False, extra_args=None, log_file=None, - applied_config_variant=None, applied_config_values=None): - start_time = time.perf_counter() - - # Get publisher for this target. 
- pub = self.app.getPublisher(target) - if pub is None: - raise InvalidPublishTargetError( - "No such publish target: %s" % target) - - # Will we need to bake first? - bake_first = True - if not pub.has_url_config: - bake_first = pub.getConfigValue('bake', True) - - # Setup logging stuff. - hdlr = None - root_logger = logging.getLogger() - if log_file and not preview: - logger.debug("Adding file handler for: %s" % log_file) - hdlr = logging.FileHandler(log_file, mode='w', encoding='utf8') - root_logger.addHandler(hdlr) - if not preview: - logger.info("Deploying to %s" % target) - else: - logger.info("Previewing deployment to %s" % target) - - # Bake first is necessary. - rec1 = None - rec2 = None - was_baked = False - bake_out_dir = os.path.join(self.app.root_dir, '_pub', target) - if bake_first: - if not preview: - bake_start_time = time.perf_counter() - logger.debug("Baking first to: %s" % bake_out_dir) - - from piecrust.baking.baker import Baker - baker = Baker( - self.app, bake_out_dir, - applied_config_variant=applied_config_variant, - applied_config_values=applied_config_values) - rec1 = baker.bake() - - from piecrust.processing.pipeline import ProcessorPipeline - proc = ProcessorPipeline( - self.app, bake_out_dir, - applied_config_variant=applied_config_variant, - applied_config_values=applied_config_values) - rec2 = proc.run() - - was_baked = True - - if not rec1.success or not rec2.success: - raise Exception( - "Error during baking, aborting publishing.") - logger.info(format_timed(bake_start_time, "Baked website.")) - else: - logger.info("Would bake to: %s" % bake_out_dir) - - # Publish! 
- logger.debug( - "Running publish target '%s' with publisher: %s" % - (target, pub.PUBLISHER_NAME)) - pub_start_time = time.perf_counter() - - ctx = PublishingContext() - ctx.bake_out_dir = bake_out_dir - ctx.bake_record = rec1 - ctx.processing_record = rec2 - ctx.was_baked = was_baked - ctx.preview = preview - ctx.args = extra_args - try: - pub.run(ctx) - except Exception as ex: - raise PublishingError( - "Error publishing to target: %s" % target) from ex - finally: - if hdlr: - root_logger.removeHandler(hdlr) - hdlr.close() - - logger.info(format_timed( - pub_start_time, "Ran publisher %s" % pub.PUBLISHER_NAME)) - - logger.info(format_timed(start_time, 'Deployed to %s' % target)) - - -def find_publisher_class(app, name, is_scheme=False): - attr_name = 'PUBLISHER_SCHEME' if is_scheme else 'PUBLISHER_NAME' - for pub_cls in app.plugin_loader.getPublishers(): - pub_sch = getattr(pub_cls, attr_name, None) - if pub_sch == name: - return pub_cls - return None - - -def find_publisher_name(app, scheme): - pub_cls = find_publisher_class(app, scheme, True) - if pub_cls: - return pub_cls.PUBLISHER_NAME - return None - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/publishing/rsync.py --- a/piecrust/publishing/rsync.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/publishing/rsync.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,21 +1,22 @@ -from piecrust.publishing.base import ShellCommandPublisherBase +from piecrust.publishing.shell import ShellCommandPublisherBase class RsyncPublisher(ShellCommandPublisherBase): PUBLISHER_NAME = 'rsync' PUBLISHER_SCHEME = 'rsync' + def parseUrlTarget(self, url): + self.config = { + 'destination': (url.netloc + url.path) + } + def _getCommandArgs(self, ctx): - if self.has_url_config: - orig = ctx.bake_out_dir - dest = self.config.netloc + self.config.path - else: - orig = self.getConfigValue('source', ctx.bake_out_dir) - dest = self.getConfigValue('destination') + orig = self.config.get('source', ctx.bake_out_dir) + dest = 
self.config.get('destination') + if not dest: + raise Exception("No destination specified.") - rsync_options = None - if not self.has_url_config: - rsync_options = self.getConfigValue('options') + rsync_options = self.config.get('options') if rsync_options is None: rsync_options = ['-avc', '--delete'] diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/publishing/sftp.py --- a/piecrust/publishing/sftp.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/publishing/sftp.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,10 +1,6 @@ import os import os.path -import socket -import urllib.parse -import getpass import logging -import paramiko from piecrust.publishing.base import Publisher, PublisherConfigurationError @@ -17,20 +13,23 @@ def setupPublishParser(self, parser, app): parser.add_argument( - '--force', - action='store_true', - help=("Upload the entire bake directory instead of only " - "the files changed by the last bake.")) + '--force', + action='store_true', + help=("Upload the entire bake directory instead of only " + "the files changed by the last bake.")) + + def parseUrlTarget(self, url): + self.config = {'host': str(url)} def run(self, ctx): - remote = self.config - if not self.has_url_config: - host = self.getConfigValue('host') - if not host: - raise PublisherConfigurationError( - "Publish target '%s' doesn't specify a 'host'." % - self.target) - remote = urllib.parse.urlparse(host) + host = self.config.get('host') + if not host: + raise PublisherConfigurationError( + "Publish target '%s' doesn't specify a 'host'." 
% + self.target) + + import urllib.parse + remote = urllib.parse.urlparse(host) hostname = remote.hostname port = remote.port or 22 @@ -39,19 +38,13 @@ hostname = path path = '' - username = remote.username - pkey_path = None - - if not self.has_url_config: - if not username: - username = self.getConfigValue('username') - if not path: - path = self.getConfigValue('path') - - pkey_path = self.getConfigValue('key') + username = self.config.get('username', remote.username) + path = self.config.get('path', path) + pkey_path = self.config.get('key') password = None if username and not ctx.preview: + import getpass password = getpass.getpass("Password for '%s': " % username) if ctx.preview: @@ -59,16 +52,18 @@ self._previewUpload(ctx, path) return + import paramiko + logger.debug("Connecting to %s:%s..." % (hostname, port)) lfk = (not username and not pkey_path) sshc = paramiko.SSHClient() sshc.load_system_host_keys() sshc.set_missing_host_key_policy(paramiko.WarningPolicy()) sshc.connect( - hostname, port=port, - username=username, password=password, - key_filename=pkey_path, - look_for_keys=lfk) + hostname, port=port, + username=username, password=password, + key_filename=pkey_path, + look_for_keys=lfk) try: logger.info("Connected as %s" % sshc.get_transport().get_username()) @@ -120,9 +115,11 @@ except OSError: pass else: - logger.info("Nothing to upload or delete on the remote server.") - logger.info("If you want to force uploading the entire website, " - "use the `--force` flag.") + logger.info( + "Nothing to upload or delete on the remote server.") + logger.info( + "If you want to force uploading the entire website, " + "use the `--force` flag.") else: logger.info("Uploading entire website...") for dirpath, dirnames, filenames in os.walk(ctx.bake_out_dir): @@ -148,7 +145,7 @@ cur = os.path.join(cur, b) if cur not in known_dirs: try: - info = client.stat(cur) + client.stat(cur) except FileNotFoundError: logger.debug("Creating remote dir: %s" % cur) client.mkdir(cur) 
diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/publishing/shell.py --- a/piecrust/publishing/shell.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/publishing/shell.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,12 +1,79 @@ +import os.path import shlex -from piecrust.publishing.base import ShellCommandPublisherBase +import logging +import threading +import subprocess +from piecrust.publishing.base import Publisher + + +logger = logging.getLogger(__name__) + + +class ShellCommandPublisherBase(Publisher): + def __init__(self, app, target, config): + super(ShellCommandPublisherBase, self).__init__(app, target, config) + self.expand_user_args = True + + def run(self, ctx): + args = self._getCommandArgs(ctx) + if self.expand_user_args: + args = [os.path.expanduser(i) for i in args] + + if ctx.preview: + preview_args = ' '.join([shlex.quote(i) for i in args]) + logger.info( + "Would run shell command: %s" % preview_args) + return True + + logger.debug( + "Running shell command: %s" % args) + + proc = subprocess.Popen( + args, cwd=self.app.root_dir, bufsize=0, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + logger.debug("Running publishing monitor for PID %d" % proc.pid) + thread = _PublishThread(proc) + thread.start() + proc.wait() + thread.join() + + if proc.returncode != 0: + logger.error( + "Publish process returned code %d" % proc.returncode) + else: + logger.debug("Publish process returned successfully.") + + return proc.returncode == 0 + + def _getCommandArgs(self, ctx): + raise NotImplementedError() + + +class _PublishThread(threading.Thread): + def __init__(self, proc): + super(_PublishThread, self).__init__( + name='publish_monitor', daemon=True) + self.proc = proc + self.root_logger = logging.getLogger() + + def run(self): + for line in iter(self.proc.stdout.readline, b''): + line_str = line.decode('utf8') + logger.info(line_str.rstrip('\r\n')) + for h in self.root_logger.handlers: + h.flush() + + self.proc.communicate() + logger.debug("Publish monitor 
exiting.") class ShellCommandPublisher(ShellCommandPublisherBase): PUBLISHER_NAME = 'shell' def _getCommandArgs(self, ctx): - target_cmd = self.getConfigValue('command') + target_cmd = self.config.get('command') if not target_cmd: raise Exception("No command specified for publish target: %s" % self.target) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/records.py --- a/piecrust/records.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,133 +0,0 @@ -import os -import os.path -import pickle -import logging -from piecrust import APP_VERSION -from piecrust.events import Event - - -logger = logging.getLogger(__name__) - - -class Record(object): - def __init__(self): - self.entries = [] - self.entry_added = Event() - self.app_version = APP_VERSION - self.record_version = self.__class__.RECORD_VERSION - self.stats = {} - - def hasLatestVersion(self): - return (self.app_version == APP_VERSION and - self.record_version == self.__class__.RECORD_VERSION) - - def addEntry(self, entry): - self.entries.append(entry) - self.entry_added.fire(entry) - - def save(self, path): - path_dir = os.path.dirname(path) - if not os.path.isdir(path_dir): - os.makedirs(path_dir, 0o755) - - with open(path, 'wb') as fp: - pickle.dump(self, fp, pickle.HIGHEST_PROTOCOL) - - def __getstate__(self): - odict = self.__dict__.copy() - del odict['entry_added'] - return odict - - def __setstate__(self, state): - state['entry_added'] = Event() - self.__dict__.update(state) - - @staticmethod - def load(path): - logger.debug("Loading bake record from: %s" % path) - with open(path, 'rb') as fp: - return pickle.load(fp) - - -class TransitionalRecord(object): - def __init__(self, record_class, previous_path=None): - self._record_class = record_class - self.transitions = {} - self.incremental_count = 0 - self.current = record_class() - if previous_path: - self.loadPrevious(previous_path) - else: - self.previous = record_class() - self.current.entry_added += 
self._onCurrentEntryAdded - - def loadPrevious(self, previous_path): - previous_record_valid = True - try: - self.previous = self._record_class.load(previous_path) - except Exception as ex: - logger.debug("Error loading previous record: %s" % ex) - logger.debug("Will reset to an empty one.") - previous_record_valid = False - - if self.previous.record_version != self._record_class.RECORD_VERSION: - logger.debug( - "Previous record has old version %s." % - self.previous.record_version) - logger.debug("Will reset to an empty one.") - previous_record_valid = False - - if not previous_record_valid: - self.previous = self._record_class() - return - - self._rebuildTransitions() - - def setPrevious(self, previous_record): - self.previous = previous_record - self._rebuildTransitions() - - def clearPrevious(self): - self.setPrevious(self._record_class()) - - def saveCurrent(self, current_path): - self.current.save(current_path) - - def detach(self): - res = self.current - self.current.entry_added -= self._onCurrentEntryAdded - self.current = None - self.previous = None - self.transitions = {} - return res - - def addEntry(self, entry): - self.current.addEntry(entry) - - def getTransitionKey(self, entry): - raise NotImplementedError() - - def _rebuildTransitions(self): - self.transitions = {} - for e in self.previous.entries: - key = self.getTransitionKey(e) - self.transitions[key] = (e, None) - - def _onCurrentEntryAdded(self, entry): - key = self.getTransitionKey(entry) - te = self.transitions.get(key) - if te is None: - logger.debug("Adding new record entry: %s" % key) - self.transitions[key] = (None, entry) - self._onNewEntryAdded(entry) - return - - if te[1] is not None: - raise Exception("A current entry already exists for: %s" % - key) - logger.debug("Setting current record entry: %s" % key) - self.transitions[key] = (te[0], entry) - - def _onNewEntryAdded(self, entry): - pass - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/rendering.py --- a/piecrust/rendering.py Tue 
Nov 21 11:00:06 2017 -0800 +++ b/piecrust/rendering.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,15 +1,10 @@ import re import os.path -import copy import logging -from werkzeug.utils import cached_property from piecrust.data.builder import ( - DataBuildingContext, build_page_data, build_layout_data) -from piecrust.data.filters import ( - PaginationFilter, SettingFilterClause, page_value_accessor) -from piecrust.fastpickle import _pickle_object, _unpickle_object -from piecrust.sources.base import PageSource + DataBuildingContext, build_page_data, add_layout_data) from piecrust.templating.base import TemplateNotFoundError, TemplatingError +from piecrust.sources.base import AbortedSourceUseError logger = logging.getLogger(__name__) @@ -19,7 +14,7 @@ re.MULTILINE) -class PageRenderingError(Exception): +class RenderingError(Exception): pass @@ -27,268 +22,277 @@ pass -class QualifiedPage(object): - def __init__(self, page, route, route_metadata): - self.page = page - self.route = route - self.route_metadata = route_metadata - - def getUri(self, sub_num=1): - return self.route.getUri(self.route_metadata, sub_num=sub_num) - - def __getattr__(self, name): - return getattr(self.page, name) - - class RenderedSegments(object): - def __init__(self, segments, render_pass_info): + def __init__(self, segments, used_templating=False): self.segments = segments - self.render_pass_info = render_pass_info + self.used_templating = used_templating class RenderedLayout(object): - def __init__(self, content, render_pass_info): + def __init__(self, content): self.content = content - self.render_pass_info = render_pass_info class RenderedPage(object): - def __init__(self, page, uri, num=1): + def __init__(self, page, sub_num): self.page = page - self.uri = uri - self.num = num + self.sub_num = sub_num self.data = None self.content = None - self.render_info = [None, None] + self.render_info = {} @property def app(self): return self.page.app - def copyRenderInfo(self): - return 
copy.deepcopy(self.render_info) - -PASS_NONE = -1 -PASS_FORMATTING = 0 -PASS_RENDERING = 1 - - -RENDER_PASSES = [PASS_FORMATTING, PASS_RENDERING] +def create_render_info(): + """ Creates a bag of rendering properties. It's a dictionary because + it will be passed between workers during the bake process, and + saved to records. + """ + return { + 'used_source_names': {'segments': [], 'layout': []}, + 'used_pagination': False, + 'pagination_has_more': False, + 'used_assets': False, + } -class RenderPassInfo(object): - def __init__(self): - self.used_source_names = set() - self.used_pagination = False - self.pagination_has_more = False - self.used_assets = False - self._custom_info = {} - - def setCustomInfo(self, key, info): - self._custom_info[key] = info - - def getCustomInfo(self, key, default=None, create_if_missing=False): - if create_if_missing: - return self._custom_info.setdefault(key, default) - return self._custom_info.get(key, default) - - -class PageRenderingContext(object): - def __init__(self, qualified_page, page_num=1, - force_render=False, is_from_request=False): - self.page = qualified_page - self.page_num = page_num +class RenderingContext(object): + def __init__(self, page, *, sub_num=1, force_render=False): + self.page = page + self.sub_num = sub_num self.force_render = force_render - self.is_from_request = is_from_request self.pagination_source = None self.pagination_filter = None + self.render_info = create_render_info() self.custom_data = {} - self.render_passes = [None, None] # Same length as RENDER_PASSES - self._current_pass = PASS_NONE + self._current_used_source_names = None @property def app(self): return self.page.app @property - def source_metadata(self): - return self.page.source_metadata - - @cached_property - def uri(self): - return self.page.getUri(self.page_num) + def current_used_source_names(self): + usn = self._current_used_source_names + if usn is not None: + return usn + else: + raise Exception("No render pass specified.") - 
@property - def current_pass_info(self): - if self._current_pass != PASS_NONE: - return self.render_passes[self._current_pass] - return None - - def setCurrentPass(self, rdr_pass): - if rdr_pass != PASS_NONE: - self.render_passes[rdr_pass] = RenderPassInfo() - self._current_pass = rdr_pass + def setRenderPass(self, name): + if name is not None: + self._current_used_source_names = \ + self.render_info['used_source_names'][name] + else: + self._current_used_source_names = None def setPagination(self, paginator): - self._raiseIfNoCurrentPass() - pass_info = self.current_pass_info - if pass_info.used_pagination: + ri = self.render_info + if ri.get('used_pagination'): raise Exception("Pagination has already been used.") assert paginator.is_loaded - pass_info.used_pagination = True - pass_info.pagination_has_more = paginator.has_more + ri['used_pagination'] = True + ri['pagination_has_more'] = paginator.has_more self.addUsedSource(paginator._source) def addUsedSource(self, source): - self._raiseIfNoCurrentPass() - if isinstance(source, PageSource): - pass_info = self.current_pass_info - pass_info.used_source_names.add(source.name) + usn = self.current_used_source_names + if source.name not in usn: + usn.append(source.name) + + +class RenderingContextStack(object): + def __init__(self): + self._ctx_stack = [] + + @property + def is_empty(self): + return len(self._ctx_stack) == 0 + + @property + def current_ctx(self): + if len(self._ctx_stack) == 0: + return None + return self._ctx_stack[-1] - def _raiseIfNoCurrentPass(self): - if self._current_pass == PASS_NONE: - raise Exception("No rendering pass is currently active.") + @property + def is_main_ctx(self): + return len(self._ctx_stack) == 1 + + def hasPage(self, page): + for ei in self._ctx_stack: + if ei.page == page: + return True + return False + + def pushCtx(self, render_ctx): + for ctx in self._ctx_stack: + if ctx.page == render_ctx.page: + raise Exception("Loop detected during rendering!") + 
self._ctx_stack.append(render_ctx) + + def popCtx(self): + del self._ctx_stack[-1] + + def clear(self): + self._ctx_stack = [] def render_page(ctx): - eis = ctx.app.env.exec_info_stack - eis.pushPage(ctx.page, ctx) + env = ctx.app.env + stats = env.stats + + stack = env.render_ctx_stack + stack.pushCtx(ctx) + + page = ctx.page + page_uri = page.getUri(ctx.sub_num) + try: # Build the data for both segment and layout rendering. - with ctx.app.env.timerScope("BuildRenderData"): + with stats.timerScope("BuildRenderData"): page_data = _build_render_data(ctx) # Render content segments. - ctx.setCurrentPass(PASS_FORMATTING) - repo = ctx.app.env.rendered_segments_repository + repo = env.rendered_segments_repository save_to_fs = True - if ctx.app.env.fs_cache_only_for_main_page and not eis.is_main_page: + if env.fs_cache_only_for_main_page and not stack.is_main_ctx: save_to_fs = False - with ctx.app.env.timerScope("PageRenderSegments"): - if repo and not ctx.force_render: + with stats.timerScope("PageRenderSegments"): + if repo is not None and not ctx.force_render: render_result = repo.get( - ctx.uri, - lambda: _do_render_page_segments(ctx.page, page_data), - fs_cache_time=ctx.page.path_mtime, - save_to_fs=save_to_fs) + page_uri, + lambda: _do_render_page_segments(ctx, page_data), + fs_cache_time=page.content_mtime, + save_to_fs=save_to_fs) else: - render_result = _do_render_page_segments(ctx.page, page_data) + render_result = _do_render_page_segments(ctx, page_data) if repo: - repo.put(ctx.uri, render_result, save_to_fs) + repo.put(page_uri, render_result, save_to_fs) # Render layout. 
- page = ctx.page - ctx.setCurrentPass(PASS_RENDERING) layout_name = page.config.get('layout') if layout_name is None: - layout_name = page.source.config.get('default_layout', 'default') + layout_name = page.source.config.get( + 'default_layout', 'default') null_names = ['', 'none', 'nil'] if layout_name not in null_names: - with ctx.app.env.timerScope("BuildRenderData"): - build_layout_data(page, page_data, render_result['segments']) - - with ctx.app.env.timerScope("PageRenderLayout"): - layout_result = _do_render_layout(layout_name, page, page_data) - else: - layout_result = { - 'content': render_result['segments']['content'], - 'pass_info': None} + with stats.timerScope("BuildRenderData"): + add_layout_data(page_data, render_result.segments) - rp = RenderedPage(page, ctx.uri, ctx.page_num) + with stats.timerScope("PageRenderLayout"): + layout_result = _do_render_layout( + layout_name, page, page_data) + else: + layout_result = RenderedLayout( + render_result.segments['content']) + + rp = RenderedPage(page, ctx.sub_num) rp.data = page_data - rp.content = layout_result['content'] - rp.render_info[PASS_FORMATTING] = _unpickle_object( - render_result['pass_info']) - if layout_result['pass_info'] is not None: - rp.render_info[PASS_RENDERING] = _unpickle_object( - layout_result['pass_info']) + rp.content = layout_result.content + rp.render_info = ctx.render_info return rp + + except AbortedSourceUseError: + raise except Exception as ex: if ctx.app.debug: raise logger.exception(ex) - page_rel_path = os.path.relpath(ctx.page.path, ctx.app.root_dir) - raise Exception("Error rendering page: %s" % page_rel_path) from ex + raise Exception("Error rendering page: %s" % + ctx.page.content_spec) from ex + finally: - ctx.setCurrentPass(PASS_NONE) - eis.popPage() + stack.popCtx() def render_page_segments(ctx): - eis = ctx.app.env.exec_info_stack - eis.pushPage(ctx.page, ctx) + env = ctx.app.env + stats = env.stats + + stack = env.render_ctx_stack + + if env.abort_source_use and 
not stack.is_empty: + cur_spec = ctx.page.content_spec + from_spec = stack.current_ctx.page.content_spec + logger.debug("Aborting rendering of '%s' from: %s." % + (cur_spec, from_spec)) + raise AbortedSourceUseError() + + stack.pushCtx(ctx) + + page = ctx.page + page_uri = page.getUri(ctx.sub_num) + try: - ctx.setCurrentPass(PASS_FORMATTING) - repo = ctx.app.env.rendered_segments_repository + repo = env.rendered_segments_repository + save_to_fs = True - if ctx.app.env.fs_cache_only_for_main_page and not eis.is_main_page: + if env.fs_cache_only_for_main_page and not stack.is_main_ctx: save_to_fs = False - with ctx.app.env.timerScope("PageRenderSegments"): - if repo and not ctx.force_render: + + with stats.timerScope("PageRenderSegments"): + if repo is not None and not ctx.force_render: render_result = repo.get( - ctx.uri, + page_uri, lambda: _do_render_page_segments_from_ctx(ctx), - fs_cache_time=ctx.page.path_mtime, + fs_cache_time=page.content_mtime, save_to_fs=save_to_fs) else: render_result = _do_render_page_segments_from_ctx(ctx) if repo: - repo.put(ctx.uri, render_result, save_to_fs) + repo.put(page_uri, render_result, save_to_fs) finally: - ctx.setCurrentPass(PASS_NONE) - eis.popPage() + stack.popCtx() - rs = RenderedSegments( - render_result['segments'], - _unpickle_object(render_result['pass_info'])) - return rs + return render_result def _build_render_data(ctx): - with ctx.app.env.timerScope("PageDataBuild"): - data_ctx = DataBuildingContext(ctx.page, page_num=ctx.page_num) - data_ctx.pagination_source = ctx.pagination_source - data_ctx.pagination_filter = ctx.pagination_filter - page_data = build_page_data(data_ctx) - if ctx.custom_data: - page_data._appendMapping(ctx.custom_data) - return page_data + data_ctx = DataBuildingContext(ctx.page, ctx.sub_num) + data_ctx.pagination_source = ctx.pagination_source + data_ctx.pagination_filter = ctx.pagination_filter + page_data = build_page_data(data_ctx) + if ctx.custom_data: + 
page_data._appendMapping(ctx.custom_data) + return page_data def _do_render_page_segments_from_ctx(ctx): page_data = _build_render_data(ctx) - return _do_render_page_segments(ctx.page, page_data) + return _do_render_page_segments(ctx, page_data) -def _do_render_page_segments(page, page_data): +def _do_render_page_segments(ctx, page_data): + page = ctx.page app = page.app - cpi = app.env.exec_info_stack.current_page_info - assert cpi is not None - assert cpi.page == page + ctx.setRenderPass('segments') engine_name = page.config.get('template_engine') format_name = page.config.get('format') engine = get_template_engine(app, engine_name) + used_templating = False formatted_segments = {} for seg_name, seg in page.segments.items(): - seg_text = '' - for seg_part in seg.parts: - part_format = seg_part.fmt or format_name - try: - with app.env.timerScope( - engine.__class__.__name__ + '_segment'): - part_text = engine.renderSegmentPart( - page.path, seg_part, page_data) - except TemplatingError as err: - err.lineno += seg_part.line - raise err + try: + with app.env.stats.timerScope( + engine.__class__.__name__ + '_segment'): + seg_text, was_rendered = engine.renderSegment( + page.content_spec, seg, page_data) + if was_rendered: + used_templating = True + except TemplatingError as err: + err.lineno += seg.line + raise err - part_text = format_text(app, part_format, part_text) - seg_text += part_text + seg_format = seg.fmt or format_name + seg_text = format_text(app, seg_format, seg_text) formatted_segments[seg_name] = seg_text if seg_name == 'content': @@ -298,43 +302,47 @@ content_abstract = seg_text[:offset] formatted_segments['content.abstract'] = content_abstract - pass_info = cpi.render_ctx.render_passes[PASS_FORMATTING] - res = { - 'segments': formatted_segments, - 'pass_info': _pickle_object(pass_info)} + res = RenderedSegments(formatted_segments, used_templating) + + app.env.stats.stepCounter('PageRenderSegments') + return res def _do_render_layout(layout_name, 
page, layout_data): - cpi = page.app.env.exec_info_stack.current_page_info - assert cpi is not None - assert cpi.page == page + app = page.app + cur_ctx = app.env.render_ctx_stack.current_ctx + assert cur_ctx is not None + assert cur_ctx.page == page + + cur_ctx.setRenderPass('layout') names = layout_name.split(',') - default_exts = page.app.env.default_layout_extensions full_names = [] for name in names: if '.' not in name: - for ext in default_exts: - full_names.append(name + ext) + full_names.append(name + '.html') else: full_names.append(name) _, engine_name = os.path.splitext(full_names[0]) engine_name = engine_name.lstrip('.') - engine = get_template_engine(page.app, engine_name) + engine = get_template_engine(app, engine_name) try: - with page.app.env.timerScope(engine.__class__.__name__ + '_layout'): + with app.env.stats.timerScope( + engine.__class__.__name__ + '_layout'): output = engine.renderFile(full_names, layout_data) except TemplateNotFoundError as ex: logger.exception(ex) - msg = "Can't find template for page: %s\n" % page.path + msg = "Can't find template for page: %s\n" % page.content_item.spec msg += "Looked for: %s" % ', '.join(full_names) raise Exception(msg) from ex - pass_info = cpi.render_ctx.render_passes[PASS_RENDERING] - res = {'content': output, 'pass_info': _pickle_object(pass_info)} + res = RenderedLayout(output) + + app.env.stats.stepCounter('PageRenderLayout') + return res @@ -354,11 +362,17 @@ format_count = 0 format_name = format_name or app.config.get('site/default_format') + + auto_fmts = app.config.get('site/auto_formats') + redirect = auto_fmts.get(format_name) + if redirect is not None: + format_name = redirect + for fmt in app.plugin_loader.getFormatters(): if not fmt.enabled: continue if fmt.FORMAT_NAMES is None or format_name in fmt.FORMAT_NAMES: - with app.env.timerScope(fmt.__class__.__name__): + with app.env.stats.timerScope(fmt.__class__.__name__): txt = fmt.render(format_name, txt) format_count += 1 if 
fmt.OUTPUT_FORMAT is not None: diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/resources/prepare/default.html --- a/piecrust/resources/prepare/default.html Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/resources/prepare/default.html Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,7 @@ --- title: %title% time: '%time.now%' +draft: true --- This is a brand new page. diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/resources/theme/pages/_category.html --- a/piecrust/resources/theme/pages/_category.html Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ ---- -title: -format: none ---- -

Posts in {{ category }}

- -
- {% for post in pagination.posts %} - {% include 'partial_post.html' %} - {% endfor %} -
-
- {% if pagination.prev_page %}{% endif %} - {% if pagination.next_page %}{% endif %} -
- diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/resources/theme/pages/_tag.html --- a/piecrust/resources/theme/pages/_tag.html Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ ---- -title: -format: none ---- -{% set display_tag = tag %} -{% if is_multiple_tag %} - {% set display_tag = tag|join(', ') %} -{% endif %} -

Posts tagged with {{ display_tag }}

- -
- {% for post in pagination.posts %} - {% include 'partial_post.html' %} - {% endfor %} -
-
- {% if pagination.prev_page %}{% endif %} - {% if pagination.next_page %}{% endif %} -
- diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/resources/theme/templates/_category.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/resources/theme/templates/_category.html Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,15 @@ +{% extends "default.html" %} + +{% block main %} +

Posts in {{ category }}

+ +
+ {% for post in pagination.posts %} + {% include 'partial_post.html' %} + {% endfor %} +
+
+ {% if pagination.prev_page %}{% endif %} + {% if pagination.next_page %}{% endif %} +
+{% endblock %} diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/resources/theme/templates/_tag.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/resources/theme/templates/_tag.html Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,19 @@ +{% extends "default.html" %} + +{% block main %} +{% set display_tag = tag %} +{% if is_multiple_tag %} + {% set display_tag = tag|join(', ') %} +{% endif %} +

Posts tagged with {{ display_tag }}

+ +
+ {% for post in pagination.posts %} + {% include 'partial_post.html' %} + {% endfor %} +
+
+ {% if pagination.prev_page %}{% endif %} + {% if pagination.next_page %}{% endif %} +
+{% endblock %} diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/resources/theme/templates/_year.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/resources/theme/templates/_year.html Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,15 @@ +{% extends "default.html" %} + +{% block main %} +

Posts in {{ year }}

+ +
+ {% for post in pagination.posts %} +

{{post.timestamp|date('%d %B')}}{{ post.title }}

+ {% endfor %} +
+
+ {% if pagination.prev_page %}{% endif %} + {% if pagination.next_page %}{% endif %} +
+{% endblock %} diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/routing.py --- a/piecrust/routing.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/routing.py Tue Nov 21 22:07:12 2017 -0800 @@ -10,7 +10,8 @@ route_re = re.compile(r'%((?P[\w\d]+):)?(?P\+)?(?P\w+)%') -route_esc_re = re.compile(r'\\%((?P[\w\d]+)\\:)?(?P\\\+)?(?P\w+)\\%') +route_esc_re = re.compile( + r'\\%((?P[\w\d]+)\\:)?(?P\\\+)?(?P\w+)\\%') ugly_url_cleaner = re.compile(r'\.html$') @@ -22,15 +23,6 @@ pass -def create_route_metadata(page): - route_metadata = copy.deepcopy(page.source_metadata) - return route_metadata - - -ROUTE_TYPE_SOURCE = 0 -ROUTE_TYPE_GENERATOR = 1 - - class RouteParameter(object): TYPE_STRING = 0 TYPE_PATH = 1 @@ -46,29 +38,23 @@ class Route(object): """ Information about a route for a PieCrust application. Each route defines the "shape" of an URL and how it maps to - sources and generators. + content sources. """ def __init__(self, app, cfg): self.app = app - - self.source_name = cfg.get('source') - self.generator_name = cfg.get('generator') - if not self.source_name and not self.generator_name: - raise InvalidRouteError( - "Both `source` and `generator` are specified.") + self.config = copy.deepcopy(cfg) + self.source_name = cfg['source'] self.uri_pattern = cfg['url'].lstrip('/') + self.pass_num = cfg.get('pass', 1) - if self.is_source_route: - self.supported_params = self.source.getSupportedRouteParameters() - else: - self.supported_params = self.generator.getSupportedRouteParameters() + self.supported_params = self.source.getSupportedRouteParameters() self.pretty_urls = app.config.get('site/pretty_urls') self.trailing_slash = app.config.get('site/trailing_slash') self.show_debug_info = app.config.get('site/show_debug_info') self.pagination_suffix_format = app.config.get( - '__cache/pagination_suffix_format') + '__cache/pagination_suffix_format') self.uri_root = app.config.get('site/root') self.uri_params = [] @@ -87,9 +73,9 @@ # (maybe there's a better way to do it but I can't 
think of any # right now) uri_pattern_no_path = ( - route_re.sub(self._uriNoPathRepl, self.uri_pattern) - .replace('//', '/') - .rstrip('/')) + route_re.sub(self._uriNoPathRepl, self.uri_pattern) + .replace('//', '/') + .rstrip('/')) if uri_pattern_no_path != self.uri_pattern: p = route_esc_re.sub(self._uriPatternRepl, re.escape(uri_pattern_no_path)) + '$' @@ -109,43 +95,15 @@ last_param = self.getParameter(self.uri_params[-1]) self.func_has_variadic_parameter = last_param.variadic - @property - def route_type(self): - if self.source_name: - return ROUTE_TYPE_SOURCE - elif self.generator_name: - return ROUTE_TYPE_GENERATOR - else: - raise InvalidRouteError() - - @property - def is_source_route(self): - return self.route_type == ROUTE_TYPE_SOURCE - - @property - def is_generator_route(self): - return self.route_type == ROUTE_TYPE_GENERATOR - @cached_property def source(self): - if not self.is_source_route: - return InvalidRouteError("This is not a source route.") for src in self.app.sources: if src.name == self.source_name: return src - raise Exception("Can't find source '%s' for route '%s'." % ( + raise Exception( + "Can't find source '%s' for route '%s'." % ( self.source_name, self.uri_pattern)) - @cached_property - def generator(self): - if not self.is_generator_route: - return InvalidRouteError("This is not a generator route.") - for gen in self.app.generators: - if gen.name == self.generator_name: - return gen - raise Exception("Can't find generator '%s' for route '%s'." 
% ( - self.generator_name, self.uri_pattern)) - def hasParameter(self, name): return any(lambda p: p.param_name == name, self.supported_params) @@ -159,8 +117,8 @@ def getParameterType(self, name): return self.getParameter(name).param_type - def matchesMetadata(self, route_metadata): - return set(self.uri_params).issubset(route_metadata.keys()) + def matchesParameters(self, route_params): + return set(self.uri_params).issubset(route_params.keys()) def matchUri(self, uri, strict=False): if not uri.startswith(self.uri_root): @@ -172,42 +130,42 @@ elif self.trailing_slash: uri = uri.rstrip('/') - route_metadata = None + route_params = None m = self.uri_re.match(uri) if m: - route_metadata = m.groupdict() + route_params = m.groupdict() if self.uri_re_no_path: m = self.uri_re_no_path.match(uri) if m: - route_metadata = m.groupdict() - if route_metadata is None: + route_params = m.groupdict() + if route_params is None: return None if not strict: # When matching URIs, if the URI is a match but is missing some - # metadata, fill those up with empty strings. This can happen if, + # parameters, fill those up with empty strings. This can happen if, # say, a route's pattern is `/foo/%slug%`, and we're matching an # URL like `/foo`. 
- matched_keys = set(route_metadata.keys()) + matched_keys = set(route_params.keys()) missing_keys = set(self.uri_params) - matched_keys for k in missing_keys: if self.getParameterType(k) != RouteParameter.TYPE_PATH: return None - route_metadata[k] = '' + route_params[k] = '' - for k in route_metadata: - route_metadata[k] = self._coerceRouteParameter( - k, route_metadata[k]) + for k in route_params: + route_params[k] = self._coerceRouteParameter( + k, route_params[k]) - return route_metadata + return route_params - def getUri(self, route_metadata, *, sub_num=1): - route_metadata = dict(route_metadata) - for k in route_metadata: - route_metadata[k] = self._coerceRouteParameter( - k, route_metadata[k]) + def getUri(self, route_params, *, sub_num=1): + route_params = dict(route_params) + for k in route_params: + route_params[k] = self._coerceRouteParameter( + k, route_params[k]) - uri = self.uri_format % route_metadata + uri = self.uri_format % route_params suffix = None if sub_num > 1: # Note that we know the pagination suffix starts with a slash. 
@@ -258,9 +216,9 @@ if len(args) < fixed_param_count: raise Exception( - "Route function '%s' expected %d arguments, " - "got %d: %s" % - (self.func_name, fixed_param_count, len(args), args)) + "Route function '%s' expected %d arguments, " + "got %d: %s" % + (self.func_name, fixed_param_count, len(args), args)) if self.func_has_variadic_parameter: coerced_args = list(args[:fixed_param_count]) @@ -270,15 +228,14 @@ else: coerced_args = args - metadata = {} + route_params = {} for arg_name, arg_val in zip(self.uri_params, coerced_args): - metadata[arg_name] = self._coerceRouteParameter( - arg_name, arg_val) + route_params[arg_name] = self._coerceRouteParameter( + arg_name, arg_val) - if self.is_generator_route: - self.generator.onRouteFunctionUsed(self, metadata) + self.source.onRouteFunctionUsed(route_params) - return self.getUri(metadata) + return self.getUri(route_params) def _uriFormatRepl(self, m): if m.group('qual') or m.group('var'): @@ -350,32 +307,12 @@ return name -class CompositeRouteFunction(object): - def __init__(self): - self._routes = [] - self._arg_names = None - - def addFunc(self, route): - if self._arg_names is None: - self._arg_names = list(route.uri_params) - - if route.uri_params != self._arg_names: - raise Exception("Cannot merge route function with arguments '%s' " - "with route function with arguments '%s'." 
% - (route.uri_params, self._arg_names)) - self._routes.append(route) +class RouteFunction: + def __init__(self, route): + self._route = route def __call__(self, *args, **kwargs): - if len(self._routes) == 1 or len(args) == len(self._arg_names): - return self._routes[0].execTemplateFunc(*args, **kwargs) + return self._route.execTemplateFunc(*args, **kwargs) - if len(args) == len(self._arg_names) + 1: - f_args = args[:-1] - for r in self._routes: - if r.source_name == args[-1]: - return r.execTemplateFunc(*f_args, **kwargs) - raise Exception("No such source: %s" % args[-1]) - - raise Exception("Incorrect number of arguments for route function. " - "Expected '%s', got '%s'" % (self._arg_names, args)) - + def _isCompatibleRoute(self, route): + return self._route.uri_pattern == route.uri_pattern diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/serving/middlewares.py --- a/piecrust/serving/middlewares.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/serving/middlewares.py Tue Nov 21 22:07:12 2017 -0800 @@ -4,15 +4,15 @@ from werkzeug.wsgi import ClosingIterator from piecrust import RESOURCES_DIR, CACHE_DIR from piecrust.data.builder import ( - DataBuildingContext, build_page_data) + DataBuildingContext, build_page_data) from piecrust.data.debug import build_var_debug_info +from piecrust.page import PageNotFoundError from piecrust.routing import RouteNotFoundError from piecrust.serving.util import ( - make_wrapped_file_response, get_requested_page, get_app_for_server) -from piecrust.sources.pageref import PageNotFoundError + make_wrapped_file_response, get_requested_page, get_app_for_server) -class StaticResourcesMiddleware(object): +class PieCrustStaticResourcesMiddleware(object): """ WSGI middleware that serves static files from the `resources/server` directory in the PieCrust package. 
""" @@ -29,7 +29,7 @@ full_path = os.path.join(mount, rel_req_path) try: response = make_wrapped_file_response( - environ, request, full_path) + environ, request, full_path) return response(environ, start_response) except OSError: pass @@ -38,7 +38,8 @@ class PieCrustDebugMiddleware(object): - """ WSGI middleware that handles debugging of PieCrust stuff. + """ WSGI middleware that handles debugging of PieCrust stuff, and runs + the asset pipeline in an SSE thread. """ def __init__(self, app, appfactory, run_sse_check=None): @@ -47,11 +48,11 @@ self.run_sse_check = run_sse_check self._proc_loop = None self._out_dir = os.path.join( - appfactory.root_dir, CACHE_DIR, appfactory.cache_key, 'server') + appfactory.root_dir, CACHE_DIR, appfactory.cache_key, 'server') self._handlers = { - 'debug_info': self._getDebugInfo, - 'werkzeug_shutdown': self._shutdownWerkzeug, - 'pipeline_status': self._startSSEProvider} + 'debug_info': self._getDebugInfo, + 'werkzeug_shutdown': self._shutdownWerkzeug, + 'pipeline_status': self._startSSEProvider} if not self.run_sse_check or self.run_sse_check(): # When using a server with code reloading, some implementations @@ -89,8 +90,8 @@ if not found: return NotFound("No such page: %s" % page_path) - ctx = DataBuildingContext(req_page.qualified_page, - page_num=req_page.page_num) + ctx = DataBuildingContext(req_page.page, + sub_num=req_page.sub_num) data = build_page_data(ctx) var_path = request.args.getlist('var') @@ -111,15 +112,15 @@ def _startSSEProvider(self, request, start_response): from piecrust.serving.procloop import ( - PipelineStatusServerSentEventProducer) + PipelineStatusServerSentEventProducer) provider = PipelineStatusServerSentEventProducer( - self._proc_loop) + self._proc_loop) it = provider.run() response = Response(it, mimetype='text/event-stream') response.headers['Cache-Control'] = 'no-cache' response.headers['Last-Event-ID'] = \ self._proc_loop.last_status_id return ClosingIterator( - response(request.environ, 
start_response), - [provider.close]) + response(request.environ, start_response), + [provider.close]) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/serving/mime.types --- a/piecrust/serving/mime.types Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/serving/mime.types Tue Nov 21 22:07:12 2017 -0800 @@ -590,6 +590,7 @@ video/gl gl video/mpeg mpeg mpg mpe video/quicktime qt mov +video/mp4 mp4 mkv video/mp4v-es video/parityfec video/pointer diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/serving/procloop.py --- a/piecrust/serving/procloop.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/serving/procloop.py Tue Nov 21 22:07:12 2017 -0800 @@ -7,8 +7,12 @@ import itertools import threading from piecrust import CONFIG_PATH, THEME_CONFIG_PATH -from piecrust.app import PieCrust -from piecrust.processing.pipeline import ProcessorPipeline +from piecrust.chefutil import format_timed_scope +from piecrust.pipelines.base import ( + PipelineJobCreateContext, PipelineJobRunContext, PipelineJobResult, + PipelineManager) +from piecrust.pipelines.records import ( + MultiRecord, MultiRecordHistory) logger = logging.getLogger(__name__) @@ -74,25 +78,28 @@ self._running = 2 +class _AssetProcessingInfo: + def __init__(self, source): + self.source = source + self.paths = set() + self.last_bake_time = time.time() + + class ProcessingLoop(threading.Thread): def __init__(self, appfactory, out_dir): - super(ProcessingLoop, self).__init__( - name='pipeline-reloader', daemon=True) + super().__init__(name='pipeline-reloader', daemon=True) self.appfactory = appfactory self.out_dir = out_dir self.last_status_id = 0 self.interval = 1 - self.app = None - self._roots = [] - self._monitor_assets_root = False - self._paths = set() - self._record = None - self._last_bake = 0 + self._app = None + self._proc_infos = None + self._last_records = None self._last_config_mtime = 0 self._obs = [] self._obs_lock = threading.Lock() config_name = ( - THEME_CONFIG_PATH if appfactory.theme_site else CONFIG_PATH) + 
THEME_CONFIG_PATH if appfactory.theme_site else CONFIG_PATH) self._config_path = os.path.join(appfactory.root_dir, config_name) def addObserver(self, obs): @@ -104,116 +111,124 @@ self._obs.remove(obs) def run(self): - self._initPipeline() + logger.debug("Initializing processing loop with output: %s" % + self.out_dir) + try: + self._init() + except Exception as ex: + logger.error("Error initializing processing loop:") + logger.exception(ex) + return - self._last_bake = time.time() + logger.debug("Doing initial processing loop bake...") + self._runPipelinesSafe() + + logger.debug("Running processing loop...") self._last_config_mtime = os.path.getmtime(self._config_path) - self._record = self.pipeline.run() while True: cur_config_time = os.path.getmtime(self._config_path) if self._last_config_mtime < cur_config_time: logger.info("Site configuration changed, reloading pipeline.") self._last_config_mtime = cur_config_time - self._initPipeline() - for root in self._roots: - self._runPipeline(root) + self._init() + self._runPipelines() continue - if self._monitor_assets_root: - assets_dir = os.path.join(self.app.root_dir, 'assets') - if os.path.isdir(assets_dir): - logger.info("Assets directory was created, reloading " - "pipeline.") - self._initPipeline() - self._runPipeline(assets_dir) - continue - - for root in self._roots: - # For each mount root we try to find the first new or + for procinfo in self._proc_infos.values(): + # For each assets folder we try to find the first new or # modified file. If any, we just run the pipeline on - # that mount. + # that source. 
found_new_or_modified = False - for dirpath, dirnames, filenames in os.walk(root): - for filename in filenames: - path = os.path.join(dirpath, filename) - if path not in self._paths: - logger.debug("Found new asset: %s" % path) - self._paths.add(path) - found_new_or_modified = True - break - if os.path.getmtime(path) > self._last_bake: - logger.debug("Found modified asset: %s" % path) - found_new_or_modified = True - break - - if found_new_or_modified: + for item in procinfo.source.getAllContents(): + path = item.spec + if path not in procinfo.paths: + logger.debug("Found new asset: %s" % path) + procinfo.paths.add(path) + found_new_or_modified = True break - + if os.path.getmtime(path) > procinfo.last_bake_time: + logger.debug("Found modified asset: %s" % path) + found_new_or_modified = True + break if found_new_or_modified: - self._runPipeline(root) + with format_timed_scope( + logger, + "change detected, reprocessed '%s'." % + procinfo.source.name): + self._runPipelinesSafe(procinfo.source) time.sleep(self.interval) - def _initPipeline(self): - # Create the app and pipeline. - self.app = self.appfactory.create() - self.pipeline = ProcessorPipeline(self.app, self.out_dir) + def _init(self): + self._app = self.appfactory.create() + self._last_records = MultiRecord() + + self._proc_infos = {} + for src in self._app.sources: + if src.config['pipeline'] != 'asset': + continue - # Get the list of assets directories. - self._roots = list(self.pipeline.mounts.keys()) + procinfo = _AssetProcessingInfo(src) + self._proc_infos[src.name] = procinfo + + # Build the list of initial asset files. + for item in src.getAllContents(): + procinfo.paths.add(item.spec) - # The 'assets' folder may not be in the mounts list if it doesn't - # exist yet, but we want to monitor for when the user creates it. 
- default_root = os.path.join(self.app.root_dir, 'assets') - self._monitor_assets_root = (default_root not in self._roots) + def _runPipelinesSafe(self, only_for_source=None): + try: + self._runPipelines(only_for_source) + except Exception as ex: + logger.error("Error while running asset pipeline:") + logger.exception(ex) - # Build the list of initial asset files. - self._paths = set() - for root in self._roots: - for dirpath, dirnames, filenames in os.walk(root): - self._paths |= set([os.path.join(dirpath, f) - for f in filenames]) + def _runPipelines(self, only_for_source): + from piecrust.baking.baker import Baker + + allowed_sources = None + if only_for_source: + allowed_sources = [only_for_source.name] + baker = Baker( + self.appfactory, self._app, self.out_dir, + allowed_pipelines=['asset'], + allowed_sources=allowed_sources, + rotate_bake_records=False) + records = baker.bake() - def _runPipeline(self, root): - self._last_bake = time.time() - try: - self._record = self.pipeline.run( - root, - previous_record=self._record, - save_record=False) + self._onPipelinesRun(records) + + def _onPipelinesRun(self, records): + self.last_status_id += 1 - status_id = self.last_status_id + 1 - self.last_status_id += 1 - - if self._record.success: + if records.success: + for rec in records.records: changed = filter( - lambda i: not i.was_collapsed_from_last_run, - self._record.entries) + lambda i: not i.was_collapsed_from_last_run, + rec.getEntries()) changed = itertools.chain.from_iterable( - map(lambda i: i.rel_outputs, changed)) + map(lambda i: i.out_paths, changed)) changed = list(changed) item = { - 'id': status_id, - 'type': 'pipeline_success', - 'assets': changed} + 'id': self.last_status_id, + 'type': 'pipeline_success', + 'assets': changed} self._notifyObservers(item) - else: - item = { - 'id': status_id, - 'type': 'pipeline_error', - 'assets': []} - for entry in self._record.entries: + else: + item = { + 'id': self.last_status_id, + 'type': 'pipeline_error', + 
'assets': []} + for rec in records.records: + for entry in rec.getEntries(): if entry.errors: asset_item = { - 'path': entry.path, - 'errors': list(entry.errors)} + 'path': entry.item_spec, + 'errors': list(entry.errors)} item['assets'].append(asset_item) self._notifyObservers(item) - except Exception as ex: - logger.exception(ex) def _notifyObservers(self, item): with self._obs_lock: diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/serving/server.py --- a/piecrust/serving/server.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/serving/server.py Tue Nov 21 22:07:12 2017 -0800 @@ -6,53 +6,36 @@ import hashlib import logging from werkzeug.exceptions import ( - NotFound, MethodNotAllowed, InternalServerError, HTTPException) + NotFound, MethodNotAllowed, InternalServerError, HTTPException) from werkzeug.wrappers import Request, Response from jinja2 import FileSystemLoader, Environment from piecrust import CACHE_DIR, RESOURCES_DIR -from piecrust.rendering import PageRenderingContext, render_page +from piecrust.rendering import RenderingContext, render_page from piecrust.routing import RouteNotFoundError from piecrust.serving.util import ( - content_type_map, make_wrapped_file_response, get_requested_page, - get_app_for_server) + content_type_map, make_wrapped_file_response, get_requested_page, + get_app_for_server) from piecrust.sources.base import SourceNotFoundError logger = logging.getLogger(__name__) -class WsgiServer(object): +class PieCrustServer(object): + """ A WSGI application that serves a PieCrust website. 
+ """ def __init__(self, appfactory, **kwargs): - self.server = Server(appfactory, **kwargs) + self.server = _ServerImpl(appfactory, **kwargs) def __call__(self, environ, start_response): return self.server._run_request(environ, start_response) -class ServeRecord(object): - def __init__(self): - self.entries = {} - - def addEntry(self, entry): - key = self._makeKey(entry.uri, entry.sub_num) - self.entries[key] = entry - - def getEntry(self, uri, sub_num): - key = self._makeKey(uri, sub_num) - return self.entries.get(key) - - def _makeKey(self, uri, sub_num): - return "%s:%s" % (uri, sub_num) - - -class ServeRecordPageEntry(object): - def __init__(self, uri, sub_num): - self.uri = uri - self.sub_num = sub_num - self.used_source_names = set() - - class MultipleNotFound(HTTPException): + """ Represents a 404 (not found) error that tried to serve one or + more pages. It will report which pages it tried to serve + before failing. + """ code = 404 def __init__(self, description, nfes): @@ -69,7 +52,9 @@ return desc -class Server(object): +class _ServerImpl(object): + """ The PieCrust server. + """ def __init__(self, appfactory, enable_debug_info=True, root_url='/', @@ -78,12 +63,11 @@ self.enable_debug_info = enable_debug_info self.root_url = root_url self.static_preview = static_preview - self._page_record = ServeRecord() self._out_dir = os.path.join( - appfactory.root_dir, - CACHE_DIR, - (appfactory.cache_key or 'default'), - 'server') + appfactory.root_dir, + CACHE_DIR, + (appfactory.cache_key or 'default'), + 'server') def _run_request(self, environ, start_response): try: @@ -104,28 +88,26 @@ request.method) raise MethodNotAllowed() - # Also handle requests to a pipeline-built asset right away. + # Handle requests to a pipeline-built asset right away. response = self._try_serve_asset(environ, request) if response is not None: return response + # Same for page assets. 
+ response = self._try_serve_page_asset( + self.appfactory.root_dir, environ, request) + if response is not None: + return response + # Create the app for this request. app = get_app_for_server(self.appfactory, root_url=self.root_url) - if (app.config.get('site/enable_debug_info') and + if (app.config.get('server/enable_debug_info') and self.enable_debug_info and '!debug' in request.args): app.config.set('site/show_debug_info', True) - # We'll serve page assets directly from where they are. - app.env.base_asset_url_format = self.root_url + '_asset/%path%' - - # Let's see if it can be a page asset. - response = self._try_serve_page_asset(app, environ, request) - if response is not None: - return response - - # Nope. Let's see if it's an actual page. + # Let's try to serve a page. try: response = self._try_serve_page(app, environ, request) return response @@ -152,62 +134,41 @@ full_path = os.path.join(self._out_dir, rel_req_path) try: - response = make_wrapped_file_response(environ, request, full_path) - return response + return make_wrapped_file_response(environ, request, full_path) except OSError: - pass - return None + return None - def _try_serve_page_asset(self, app, environ, request): + def _try_serve_page_asset(self, app_root_dir, environ, request): if not request.path.startswith(self.root_url + '_asset/'): return None offset = len(self.root_url + '_asset/') - full_path = os.path.join(app.root_dir, request.path[offset:]) - if not os.path.isfile(full_path): + full_path = os.path.join(app_root_dir, request.path[offset:]) + + try: + return make_wrapped_file_response(environ, request, full_path) + except OSError: return None - return make_wrapped_file_response(environ, request, full_path) - def _try_serve_page(self, app, environ, request): # Find a matching page. req_page = get_requested_page(app, request.path) # If we haven't found any good match, report all the places we didn't # find it at. 
- qp = req_page.qualified_page - if qp is None: + if req_page.page is None: msg = "Can't find path for '%s':" % request.path raise MultipleNotFound(msg, req_page.not_found_errors) # We have a page, let's try to render it. - render_ctx = PageRenderingContext(qp, - page_num=req_page.page_num, - force_render=True, - is_from_request=True) - if qp.route.is_generator_route: - qp.route.generator.prepareRenderContext(render_ctx) - - # See if this page is known to use sources. If that's the case, - # just don't use cached rendered segments for that page (but still - # use them for pages that are included in it). - uri = qp.getUri() - entry = self._page_record.getEntry(uri, req_page.page_num) - if (qp.route.is_generator_route or entry is None or - entry.used_source_names): - cache_key = '%s:%s' % (uri, req_page.page_num) - app.env.rendered_segments_repository.invalidate(cache_key) + render_ctx = RenderingContext(req_page.page, + sub_num=req_page.sub_num, + force_render=True) + req_page.page.source.prepareRenderContext(render_ctx) # Render the page. rendered_page = render_page(render_ctx) - # Remember stuff for next time. - if entry is None: - entry = ServeRecordPageEntry(req_page.req_path, req_page.page_num) - self._page_record.addEntry(entry) - for pinfo in render_ctx.render_passes: - entry.used_source_names |= pinfo.used_source_names - # Start doing stuff. page = rendered_page.page rp_content = rendered_page.content @@ -216,10 +177,10 @@ if app.config.get('site/show_debug_info'): now_time = time.perf_counter() timing_info = ( - '%8.1f ms' % - ((now_time - app.env.start_time) * 1000.0)) + '%8.1f ms' % + ((now_time - app.env.start_time) * 1000.0)) rp_content = rp_content.replace( - '__PIECRUST_TIMING_INFORMATION__', timing_info) + '__PIECRUST_TIMING_INFORMATION__', timing_info) # Build the response. 
response = Response() @@ -311,4 +272,3 @@ template += '.html' return super(ErrorMessageLoader, self).get_source(env, template) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/serving/util.py --- a/piecrust/serving/util.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/serving/util.py Tue Nov 21 22:07:12 2017 -0800 @@ -5,11 +5,8 @@ import datetime from werkzeug.wrappers import Response from werkzeug.wsgi import wrap_file -from piecrust.app import PieCrust, apply_variant_and_values -from piecrust.rendering import QualifiedPage +from piecrust.page import PageNotFoundError from piecrust.routing import RouteNotFoundError -from piecrust.sources.base import MODE_PARSING -from piecrust.sources.pageref import PageNotFoundError from piecrust.uriutil import split_sub_uri @@ -20,31 +17,38 @@ app = appfactory.create() app.config.set('site/root', root_url) app.config.set('server/is_serving', True) + # We'll serve page assets directly from where they are. + app.config.set('site/asset_url_format', root_url + '_asset/%path%') return app class RequestedPage(object): def __init__(self): - self.qualified_page = None + self.page = None + self.sub_num = 1 self.req_path = None - self.page_num = 1 self.not_found_errors = [] -def find_routes(routes, uri, is_sub_page=False): - """ Returns routes matching the given URL, but puts generator routes - at the end. +def find_routes(routes, uri, decomposed_uri=None): + """ Returns routes matching the given URL. 
""" + sub_num = 0 + uri_no_sub = None + if decomposed_uri is not None: + uri_no_sub, sub_num = decomposed_uri + res = [] - gen_res = [] for route in routes: - metadata = route.matchUri(uri) - if metadata is not None: - if route.is_source_route: - res.append((route, metadata, is_sub_page)) - else: - gen_res.append((route, metadata, is_sub_page)) - return res + gen_res + route_params = route.matchUri(uri) + if route_params is not None: + res.append((route, route_params, 1)) + + if sub_num > 1: + route_params = route.matchUri(uri_no_sub) + if route_params is not None: + res.append((route, route_params, sub_num)) + return res def get_requested_page(app, req_path): @@ -54,56 +58,39 @@ req_path = req_path.rstrip('/') # Try to find what matches the requested URL. - routes = find_routes(app.routes, req_path) - # It could also be a sub-page (i.e. the URL ends with a page number), so # we try to also match the base URL (without the number). - req_path_no_num, page_num = split_sub_uri(app, req_path) - if page_num > 1: - routes += find_routes(app.routes, req_path_no_num, True) - + req_path_no_sub, sub_num = split_sub_uri(app, req_path) + routes = find_routes(app.routes, req_path, (req_path_no_sub, sub_num)) if len(routes) == 0: raise RouteNotFoundError("Can't find route for: %s" % req_path) req_page = RequestedPage() - for route, route_metadata, is_sub_page in routes: - try: - cur_req_path = req_path - if is_sub_page: - cur_req_path = req_path_no_num + for route, route_params, route_sub_num in routes: + cur_req_path = req_path + if route_sub_num > 1: + cur_req_path = req_path_no_sub - qp = _get_requested_page_for_route( - app, route, route_metadata, cur_req_path) - if qp is not None: - req_page.qualified_page = qp - req_page.req_path = cur_req_path - if is_sub_page: - req_page.page_num = page_num - break - except PageNotFoundError as nfe: - req_page.not_found_errors.append(nfe) + page = _get_requested_page_for_route(app, route, route_params) + if page is not None: + 
req_page.page = page + req_page.sub_num = route_sub_num + req_page.req_path = cur_req_path + break + + req_page.not_found_errors.append(PageNotFoundError( + "No path found for '%s' in source '%s'." % + (cur_req_path, route.source_name))) + return req_page -def _get_requested_page_for_route(app, route, route_metadata, req_path): - if not route.is_generator_route: - source = app.getSource(route.source_name) - factory = source.findPageFactory(route_metadata, MODE_PARSING) - if factory is None: - raise PageNotFoundError( - "No path found for '%s' in source '%s'." % - (req_path, source.name)) - else: - factory = route.generator.getPageFactory(route_metadata) - if factory is None: - raise PageNotFoundError( - "No path found for '%s' in generator '%s'." % - (req_path, route.generator.name)) - - # Build the page. - page = factory.buildPage() - qp = QualifiedPage(page, route, route_metadata) - return qp +def _get_requested_page_for_route(app, route, route_params): + source = app.getSource(route.source_name) + item = source.findContentFromRoute(route_params) + if item is not None: + return app.getPage(source, item) + return None def load_mimetype_map(): @@ -137,20 +124,20 @@ response.set_etag(etag) response.last_modified = datetime.datetime.fromtimestamp(mtime) response.mimetype = mimetype_map.get( - ext.lstrip('.'), 'text/plain') + ext.lstrip('.'), 'text/plain') response.direct_passthrough = True return response mimetype_map = load_mimetype_map() content_type_map = { - 'html': 'text/html', - 'xml': 'text/xml', - 'txt': 'text/plain', - 'text': 'text/plain', - 'css': 'text/css', - 'xhtml': 'application/xhtml+xml', - 'atom': 'application/atom+xml', # or 'text/xml'? - 'rss': 'application/rss+xml', # or 'text/xml'? - 'json': 'application/json'} + 'html': 'text/html', + 'xml': 'text/xml', + 'txt': 'text/plain', + 'text': 'text/plain', + 'css': 'text/css', + 'xhtml': 'application/xhtml+xml', + 'atom': 'application/atom+xml', # or 'text/xml'? 
+ 'rss': 'application/rss+xml', # or 'text/xml'? + 'json': 'application/json'} diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/serving/wrappers.py --- a/piecrust/serving/wrappers.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/serving/wrappers.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,14 +1,46 @@ import os import signal import logging -import urllib.request logger = logging.getLogger(__name__) -def run_werkzeug_server(appfactory, host, port, - use_debugger=False, use_reloader=False): +def run_piecrust_server(wsgi, appfactory, host, port, + is_cmdline_mode=False, + serve_admin=False, + use_debugger=False, + use_reloader=False): + + if wsgi == 'werkzeug': + _run_werkzeug_server(appfactory, host, port, + is_cmdline_mode=is_cmdline_mode, + serve_admin=serve_admin, + use_debugger=use_debugger, + use_reloader=use_reloader) + + elif wsgi == 'gunicorn': + options = { + 'bind': '%s:%s' % (host, port), + 'accesslog': '-', # print access log to stderr + } + if use_debugger: + options['loglevel'] = 'debug' + if use_reloader: + options['reload'] = True + _run_gunicorn_server(appfactory, + is_cmdline_mode=is_cmdline_mode, + gunicorn_options=options) + + else: + raise Exception("Unknown WSGI server: %s" % wsgi) + + +def _run_werkzeug_server(appfactory, host, port, *, + is_cmdline_mode=False, + serve_admin=False, + use_debugger=False, + use_reloader=False): from werkzeug.serving import run_simple def _run_sse_check(): @@ -22,6 +54,9 @@ os.environ.get('WERKZEUG_RUN_MAIN') == 'true') app = _get_piecrust_server(appfactory, + is_cmdline_mode=is_cmdline_mode, + serve_site=True, + serve_admin=serve_admin, run_sse_check=_run_sse_check) # We need to do a few things to get Werkzeug to properly shutdown or @@ -46,6 +81,10 @@ from piecrust.serving import procloop procloop.server_shutdown = True + if serve_admin: + from piecrust.admin import pubutil + pubutil.server_shutdown = True + def _shutdown_server_and_raise_sigint(): if not use_reloader or os.environ.get('WERKZEUG_RUN_MAIN') == 'true': # 
We only need to shutdown the SSE requests for the process @@ -58,6 +97,9 @@ signal.signal(signal.SIGINT, lambda *args: _shutdown_server_and_raise_sigint()) + # Disable debugger PIN protection. + os.environ['WERKZEUG_DEBUG_PIN'] = 'off' + try: run_simple(host, port, app, threaded=True, @@ -73,7 +115,9 @@ raise -def run_gunicorn_server(appfactory, gunicorn_options=None): +def _run_gunicorn_server(appfactory, + is_cmdline_mode=False, + gunicorn_options=None): from gunicorn.app.base import BaseApplication class PieCrustGunicornApplication(BaseApplication): @@ -90,20 +134,100 @@ def load(self): return self.app - app = _get_piecrust_server(appfactory) + app = _get_piecrust_server(appfactory, + is_cmdline_mode=is_cmdline_mode) gunicorn_options = gunicorn_options or {} app_wrapper = PieCrustGunicornApplication(app, gunicorn_options) app_wrapper.run() -def _get_piecrust_server(appfactory, run_sse_check=None): - from piecrust.serving.middlewares import ( - StaticResourcesMiddleware, PieCrustDebugMiddleware) - from piecrust.serving.server import WsgiServer - app = WsgiServer(appfactory) - app = StaticResourcesMiddleware(app) - app = PieCrustDebugMiddleware( - app, appfactory, run_sse_check=run_sse_check) +def get_piecrust_server(root_dir, *, + debug=False, + cache_key=None, + serve_site=True, + serve_admin=False, + is_cmdline_mode=False): + from piecrust.app import PieCrustFactory + appfactory = PieCrustFactory(root_dir, + debug=debug, + cache_key=cache_key) + return _get_piecrust_server(appfactory, + serve_site=serve_site, + serve_admin=serve_admin, + is_cmdline_mode=is_cmdline_mode) + + +def _get_piecrust_server(appfactory, *, + serve_site=True, + serve_admin=False, + is_cmdline_mode=False, + admin_root_url=None, + run_sse_check=None): + app = None + + if serve_site: + from piecrust.serving.middlewares import ( + PieCrustStaticResourcesMiddleware, PieCrustDebugMiddleware) + from piecrust.serving.server import PieCrustServer + + app = PieCrustServer(appfactory) + app = 
PieCrustStaticResourcesMiddleware(app) + + if is_cmdline_mode: + app = PieCrustDebugMiddleware( + app, appfactory, run_sse_check=run_sse_check) + + if serve_admin: + from piecrust.admin.web import create_foodtruck_app + + es = { + 'FOODTRUCK_CMDLINE_MODE': is_cmdline_mode, + 'FOODTRUCK_ROOT_DIR': appfactory.root_dir, + 'FOODTRUCK_ROOT_URL': admin_root_url, + 'DEBUG': appfactory.debug} + if is_cmdline_mode: + es.update({ + 'SECRET_KEY': os.urandom(22), + 'LOGIN_DISABLED': True}) + + if appfactory.debug and is_cmdline_mode: + # Disable PIN protection with Werkzeug's debugger. + os.environ['WERKZEUG_DEBUG_PIN'] = 'off' + + admin_app = create_foodtruck_app(es, url_prefix=admin_root_url) + if app is not None: + admin_app.wsgi_app = _PieCrustSiteOrAdminMiddleware( + app, admin_app.wsgi_app, admin_root_url) + + app = admin_app + return app + +class _PieCrustSiteOrAdminMiddleware: + def __init__(self, main_app, admin_app, admin_root_url): + from werkzeug.exceptions import abort + + def _err_resp(e, sr): + abort(404) + + self.main_app = main_app + self.admin_app = admin_app or _err_resp + self.admin_root_url = admin_root_url + + def __call__(self, environ, start_response): + path_info = environ.get('PATH_INFO', '') + if path_info.startswith(self.admin_root_url): + return self.admin_app(environ, start_response) + return self.main_app(environ, start_response) + + +class _PieCrustAdminScriptNamePatcherMiddleware: + def __init__(self, admin_app, admin_root_url): + self.admin_app = admin_app + self.admin_root_url = '/%s' % admin_root_url.strip('/') + + def __call__(self, environ, start_response): + environ['SCRIPT_NAME'] = self.admin_root_url + return self.admin_app(environ, start_response) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/array.py --- a/piecrust/sources/array.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,44 +0,0 @@ -from piecrust.sources.base import PageSource -from piecrust.sources.mixins import 
SimplePaginationSourceMixin -from piecrust.sources.pageref import PageRef - - -class CachedPageFactory(object): - """ A `PageFactory` (in appearance) that already has a page built. - """ - def __init__(self, page): - self._page = page - - @property - def rel_path(self): - return self._page.rel_path - - @property - def metadata(self): - return self._page.source_metadata - - @property - def ref_spec(self): - return self._page.ref_spec - - @property - def path(self): - return self._page.path - - def buildPage(self): - return self._page - - -class ArraySource(PageSource, SimplePaginationSourceMixin): - def __init__(self, app, inner_source, name='array', config=None): - super(ArraySource, self).__init__(app, name, config) - self.inner_source = inner_source - - @property - def page_count(self): - return len(self.inner_source) - - def getPageFactories(self): - for p in self.inner_source: - yield CachedPageFactory(p) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/autoconfig.py --- a/piecrust/sources/autoconfig.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/sources/autoconfig.py Tue Nov 21 22:07:12 2017 -0800 @@ -3,30 +3,21 @@ import os.path import logging from piecrust.configuration import ConfigurationError -from piecrust.routing import RouteParameter -from piecrust.sources.base import ( - PageSource, PageFactory, InvalidFileSystemEndpointError) -from piecrust.sources.default import ( - filter_page_dirname, filter_page_filename) -from piecrust.sources.interfaces import IListableSource -from piecrust.sources.mixins import SimplePaginationSourceMixin +from piecrust.sources.base import ContentItem +from piecrust.sources.default import DefaultContentSource logger = logging.getLogger(__name__) -class AutoConfigSourceBase(PageSource, SimplePaginationSourceMixin, - IListableSource): - """ Base class for page sources that automatically apply configuration +class AutoConfigContentSourceBase(DefaultContentSource): + """ Base class for content sources that automatically 
apply configuration settings to their generated pages based on those pages' paths. """ def __init__(self, app, name, config): - super(AutoConfigSourceBase, self).__init__(app, name, config) - self.fs_endpoint = config.get('fs_endpoint', name) - self.fs_endpoint_path = os.path.join(self.root_dir, self.fs_endpoint) - self.supported_extensions = list( - app.config.get('site/auto_formats').keys()) - self.default_auto_format = app.config.get('site/default_auto_format') + super().__init__(app, name, config) + + config.setdefault('data_type', 'page_iterator') self.capture_mode = config.get('capture_mode', 'path') if self.capture_mode not in ['path', 'dirname', 'filename']: @@ -34,91 +25,36 @@ "one of: path, dirname, filename" % name) - def getSupportedRouteParameters(self): - return [ - RouteParameter('slug', RouteParameter.TYPE_PATH)] - - def buildPageFactories(self): - logger.debug("Scanning for pages in: %s" % self.fs_endpoint_path) - if not os.path.isdir(self.fs_endpoint_path): - raise InvalidFileSystemEndpointError(self.name, - self.fs_endpoint_path) - - for dirpath, dirnames, filenames in os.walk(self.fs_endpoint_path): - rel_dirpath = os.path.relpath(dirpath, self.fs_endpoint_path) - dirnames[:] = list(filter(filter_page_dirname, dirnames)) + def _finalizeContent(self, parent_group, items, groups): + super()._finalizeContent(parent_group, items, groups) - # If `capture_mode` is `dirname`, we don't need to recompute it - # for each filename, so we do it here. 
- if self.capture_mode == 'dirname': - config = self._extractConfigFragment(rel_dirpath) - - for f in filter(filter_page_filename, filenames): - if self.capture_mode == 'path': - path = os.path.join(rel_dirpath, f) - config = self._extractConfigFragment(path) - elif self.capture_mode == 'filename': - config = self._extractConfigFragment(f) - - fac_path = f - if rel_dirpath != '.': - fac_path = os.path.join(rel_dirpath, f) - - slug = self._makeSlug(fac_path) - - metadata = { - 'slug': slug, - 'config': config} - yield PageFactory(self, fac_path, metadata) + # If `capture_mode` is `dirname`, we don't need to recompute it + # for each filename, so we do it here. + if self.capture_mode == 'dirname': + rel_dirpath = '.' + if parent_group is not None: + rel_dirpath = os.path.relpath(parent_group.spec, + self.fs_endpoint_path) + config = self._extractConfigFragment(rel_dirpath) - def resolveRef(self, ref_path): - path = os.path.normpath( - os.path.join(self.fs_endpoint_path, ref_path.lstrip("\\/"))) - - config = None - if self.capture_mode == 'dirname': - config = self._extractConfigFragment(os.path.dirname(ref_path)) - elif self.capture_mode == 'path': - config = self._extractConfigFragment(ref_path) - elif self.capture_mode == 'filename': - config = self._extractConfigFragment(os.path.basename(ref_path)) - - slug = self._makeSlug(ref_path) - metadata = {'slug': slug, 'config': config} - return path, metadata - - def listPath(self, rel_path): - raise NotImplementedError() + for i in items: + # Compute the config for the other capture modes. 
+ if self.capture_mode == 'path': + rel_path = os.path.relpath(i.spec, self.fs_endpoint_path) + config = self._extractConfigFragment(rel_path) + elif self.capture_mode == 'filename': + fname = os.path.basename(i.spec) + config = self._extractConfigFragment(fname) - def getDirpath(self, rel_path): - return os.path.dirname(rel_path) - - def getBasename(self, rel_path): - filename = os.path.basename(rel_path) - name, _ = os.path.splitext(filename) - return name - - def _makeSlug(self, rel_path): - slug = rel_path.replace('\\', '/') - slug = self._cleanSlug(slug) - slug, ext = os.path.splitext(slug) - if ext.lstrip('.') not in self.supported_extensions: - slug += ext - if slug.startswith('./'): - slug = slug[2:] - if slug == '_index': - slug = '' - return slug - - def _cleanSlug(self, slug): - return slug + # Set the config on the content item's metadata. + i.metadata.setdefault('config', {}).update(config) def _extractConfigFragment(self, rel_path): raise NotImplementedError() -class AutoConfigSource(AutoConfigSourceBase): - """ Page source that extracts configuration settings from the sub-folders +class AutoConfigContentSource(AutoConfigContentSourceBase): + """ Content source that extracts configuration settings from the sub-folders each page resides in. This is ideal for setting tags or categories on pages based on the folders they're in. 
""" @@ -126,13 +62,12 @@ def __init__(self, app, name, config): config['capture_mode'] = 'dirname' - super(AutoConfigSource, self).__init__(app, name, config) + super().__init__(app, name, config) + self.setting_name = config.get('setting_name', name) self.only_single_values = config.get('only_single_values', False) self.collapse_single_values = config.get('collapse_single_values', False) - self.supported_extensions = list( - app.config.get('site/auto_formats').keys()) def _extractConfigFragment(self, rel_path): if rel_path == '.': @@ -157,48 +92,31 @@ return {self.setting_name: values} - def findPageFactory(self, metadata, mode): + def findContentFromRoute(self, route_params): # Pages from this source are effectively flattened, so we need to # find pages using a brute-force kinda way. + route_slug = route_params.get('slug', '') + if not route_slug: + route_slug = '_index' + for dirpath, dirnames, filenames in os.walk(self.fs_endpoint_path): for f in filenames: slug, _ = os.path.splitext(f) - if slug == metadata['slug']: + if slug == route_slug: path = os.path.join(dirpath, f) rel_path = os.path.relpath(path, self.fs_endpoint_path) config = self._extractConfigFragment(rel_path) metadata = {'slug': slug, 'config': config} - return PageFactory(self, rel_path, metadata) + return ContentItem(path, metadata) return None - def listPath(self, rel_path): - rel_path = rel_path.lstrip('\\/') - path = os.path.join(self.fs_endpoint_path, rel_path) - names = sorted(os.listdir(path)) - items = [] - for name in names: - if os.path.isdir(os.path.join(path, name)): - if filter_page_dirname(name): - rel_subdir = os.path.join(rel_path, name) - items.append((True, name, rel_subdir)) - else: - if filter_page_filename(name): - cur_rel_path = os.path.join(rel_path, name) - slug = self._makeSlug(cur_rel_path) - config = self._extractConfigFragment(cur_rel_path) - metadata = {'slug': slug, 'config': config} - fac = PageFactory(self, cur_rel_path, metadata) - - name, _ = 
os.path.splitext(name) - items.append((False, name, fac)) - return items - - def _cleanSlug(self, slug): + def _makeSlug(self, path): + slug = super()._makeSlug(path) return os.path.basename(slug) -class OrderedPageSource(AutoConfigSourceBase): - """ A page source that assigns an "order" to its pages based on a +class OrderedContentSource(AutoConfigContentSourceBase): + """ A content source that assigns an "order" to its pages based on a numerical prefix in their filename. Page iterators will automatically sort pages using that order. """ @@ -208,14 +126,13 @@ def __init__(self, app, name, config): config['capture_mode'] = 'path' - super(OrderedPageSource, self).__init__(app, name, config) + super().__init__(app, name, config) + self.setting_name = config.get('setting_name', 'order') self.default_value = config.get('default_value', 0) - self.supported_extensions = list( - app.config.get('site/auto_formats').keys()) - def findPageFactory(self, metadata, mode): - uri_path = metadata.get('slug', '') + def findContentFromRoute(self, route_params): + uri_path = route_params.get('slug', '') if uri_path == '': uri_path = '_index' @@ -253,60 +170,16 @@ if not found: return None - fac_path = os.path.relpath(path, self.fs_endpoint_path) - config = self._extractConfigFragment(fac_path) + rel_path = os.path.relpath(path, self.fs_endpoint_path) + config = self._extractConfigFragment(rel_path) metadata = {'slug': uri_path, 'config': config} - - return PageFactory(self, fac_path, metadata) + return ContentItem(path, metadata) def getSorterIterator(self, it): accessor = self.getSettingAccessor() return OrderTrailSortIterator(it, self.setting_name + '_trail', value_accessor=accessor) - def listPath(self, rel_path): - rel_path = rel_path.lstrip('/') - path = self.fs_endpoint_path - if rel_path != '': - parts = rel_path.split('/') - for p in parts: - p_pat = r'(\d+_)?' 
+ re.escape(p) + '$' - for name in os.listdir(path): - if re.match(p_pat, name): - path = os.path.join(path, name) - break - else: - raise Exception("No such path: %s" % rel_path) - - items = [] - names = sorted(os.listdir(path)) - for name in names: - clean_name = self.re_pattern.sub('', name) - clean_name, _ = os.path.splitext(clean_name) - if os.path.isdir(os.path.join(path, name)): - if filter_page_dirname(name): - rel_subdir = os.path.join(rel_path, name) - items.append((True, clean_name, rel_subdir)) - else: - if filter_page_filename(name): - slug = self._makeSlug(os.path.join(rel_path, name)) - - fac_path = name - if rel_path != '.': - fac_path = os.path.join(rel_path, name) - fac_path = fac_path.replace('\\', '/') - - config = self._extractConfigFragment(fac_path) - metadata = {'slug': slug, 'config': config} - fac = PageFactory(self, fac_path, metadata) - - name, _ = os.path.splitext(name) - items.append((False, clean_name, fac)) - return items - - def _cleanSlug(self, slug): - return self.re_pattern.sub(r'\1', slug) - def _extractConfigFragment(self, rel_path): values = [] for m in self.re_pattern.finditer(rel_path): @@ -317,15 +190,12 @@ values.append(self.default_value) return { - self.setting_name: values[-1], - self.setting_name + '_trail': values} + self.setting_name: values[-1], + self.setting_name + '_trail': values} - def _populateMetadata(self, rel_path, metadata, mode=None): - _, filename = os.path.split(rel_path) - config = self._extractConfigFragment(filename) - metadata['config'] = config - slug = metadata['slug'] - metadata['slug'] = self.re_pattern.sub(r'\1', slug) + def _makeSlug(self, path): + slug = super()._makeSlug(path) + return self.re_pattern.sub(r'\1', slug) class OrderTrailSortIterator(object): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/base.py --- a/piecrust/sources/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/sources/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,137 +1,164 @@ -import copy import logging 
+import collections from werkzeug.utils import cached_property -from piecrust.page import Page -from piecrust.data.assetor import Assetor +# Source realms, to differentiate sources in the site itself ('User') +# and sources in the site's theme ('Theme'). REALM_USER = 0 REALM_THEME = 1 REALM_NAMES = { - REALM_USER: 'User', - REALM_THEME: 'Theme'} + REALM_USER: 'User', + REALM_THEME: 'Theme'} -MODE_PARSING = 0 -MODE_CREATING = 1 +# Types of relationships a content source can be asked for. +REL_PARENT_GROUP = 1 +REL_LOGICAL_PARENT_ITEM = 2 +REL_LOGICAl_CHILD_GROUP = 3 +REL_ASSETS = 10 logger = logging.getLogger(__name__) -def build_pages(app, factories): - for f in factories: - yield f.buildPage() - - class SourceNotFoundError(Exception): pass -class InvalidFileSystemEndpointError(Exception): - def __init__(self, source_name, fs_endpoint): - super(InvalidFileSystemEndpointError, self).__init__( - "Invalid file-system endpoint for source '%s': %s" % - (source_name, fs_endpoint)) +class InsufficientRouteParameters(Exception): + pass + + +class AbortedSourceUseError(Exception): + pass + + +class GeneratedContentException(Exception): + pass -class PageFactory(object): - """ A class responsible for creating a page. +CONTENT_TYPE_PAGE = 0 +CONTENT_TYPE_ASSET = 1 + + +class ContentItem: + """ Describes a piece of content. + + Some known metadata that PieCrust will use include: + - `date`: A `datetime.date` object that will set the date of the page. + - `datetime`: A `datetime.datetime` object that will set the date and + time of the page. + - `route_params`: A dictionary of route parameters to generate the + URL to the content. + - `config`: A dictionary of configuration settings to merge into the + settings found in the content itself. 
""" - def __init__(self, source, rel_path, metadata): - self.source = source - self.rel_path = rel_path + def __init__(self, spec, metadata): + self.spec = spec self.metadata = metadata - @cached_property - def ref_spec(self): - return '%s:%s' % (self.source.name, self.rel_path) - - @cached_property - def path(self): - path, _ = self.source.resolveRef(self.rel_path) - return path - - def buildPage(self): - repo = self.source.app.env.page_repository - cache_key = '%s:%s' % (self.source.name, self.rel_path) - return repo.get(cache_key, self._doBuildPage) - - def _doBuildPage(self): - logger.debug("Building page: %s" % self.path) - page = Page(self.source, copy.deepcopy(self.metadata), self.rel_path) - return page + @property + def is_group(self): + return False -class PageSource(object): - """ A source for pages, e.g. a directory with one file per page. +class ContentGroup: + """ Describes a group of `ContentItem`s. """ + def __init__(self, spec, metadata): + self.spec = spec + self.metadata = metadata + + @property + def is_group(self): + return True + + +class ContentSource: + """ A source for content. 
+ """ + SOURCE_NAME = None + DEFAULT_PIPELINE_NAME = None + def __init__(self, app, name, config): self.app = app self.name = name self.config = config or {} - self.config.setdefault('realm', REALM_USER) - self._factories = None - self._provider_type = None - - def __getattr__(self, name): - try: - return self.config[name] - except KeyError: - raise AttributeError() + self._cache = None + self._page_cache = None @property def is_theme_source(self): - return self.realm == REALM_THEME - - @property - def root_dir(self): - if self.is_theme_source: - return self.app.theme_dir - return self.app.root_dir - - def getPages(self): - return build_pages(self.app, self.getPageFactories()) + return self.config['realm'] == REALM_THEME - def getPage(self, metadata): - factory = self.findPageFactory(metadata, MODE_PARSING) - if factory is None: - return None - return factory.buildPage() + @cached_property + def route(self): + return self.app.getSourceRoute(self.name) - def getPageFactories(self): - if self._factories is None: - self._factories = list(self.buildPageFactories()) - return self._factories + def openItem(self, item, mode='r', **kwargs): + raise NotImplementedError() - def getSupportedRouteParameters(self): + def getItemMtime(self, item): raise NotImplementedError() - def buildPageFactories(self): - raise NotImplementedError() - - def buildPageFactory(self, path): - raise NotImplementedError() + def getAllPages(self): + if self._page_cache is not None: + return self._page_cache - def resolveRef(self, ref_path): - """ Returns the full path and source metadata given a source - (relative) path, like a ref-spec. 
- """ - raise NotImplementedError() + getter = self.app.getPage + self._page_cache = [getter(self, i) for i in self.getAllContents()] + return self._page_cache + + def getAllContents(self): + if self._cache is not None: + return self._cache - def findPageFactory(self, metadata, mode): - raise NotImplementedError() + cache = [] + stack = collections.deque() + stack.append(None) + while len(stack) > 0: + cur = stack.popleft() + try: + contents = self.getContents(cur) + except GeneratedContentException: + continue + if contents is not None: + for c in contents: + if c.is_group: + stack.append(c) + else: + cache.append(c) + self._cache = cache + return cache - def buildDataProvider(self, page, override): - if not self._provider_type: - from piecrust.data.provider import get_data_provider_class - self._provider_type = get_data_provider_class(self.app, - self.data_type) - return self._provider_type(self, page, override) + def getContents(self, group): + raise NotImplementedError( + "'%s' doesn't implement 'getContents'." % self.__class__) + + def getRelatedContents(self, item, relationship): + raise NotImplementedError( + "'%s' doesn't implement 'getRelatedContents'." % self.__class__) - def finalizeConfig(self, page): + def findContentFromSpec(self, spec): + raise NotImplementedError( + "'%s' doesn't implement 'findContentFromSpec'." % self.__class__) + + def findContentFromRoute(self, route_params): + raise NotImplementedError( + "'%s' doesn't implement 'findContentFromRoute'." % self.__class__) + + def getSupportedRouteParameters(self): + raise NotImplementedError( + "'%s' doesn't implement 'getSupportedRouteParameters'." 
% + self.__class__) + + def prepareRenderContext(self, ctx): pass - def buildAssetor(self, page, uri): - return Assetor(page, uri) + def onRouteFunctionUsed(self, route_params): + pass + def describe(self): + return None + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/blogarchives.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/sources/blogarchives.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,250 @@ +import time +import logging +import datetime +import collections +from piecrust.data.filters import PaginationFilter, IFilterClause +from piecrust.dataproviders.pageiterator import ( + PageIterator, HardCodedFilterIterator, DateSortIterator) +from piecrust.page import Page +from piecrust.pipelines._pagebaker import PageBaker +from piecrust.pipelines._pagerecords import ( + PagePipelineRecordEntry, + add_page_job_result, merge_job_result_into_record_entry) +from piecrust.pipelines.base import ( + ContentPipeline, + create_job, get_record_name_for_source, content_item_from_job) +from piecrust.routing import RouteParameter +from piecrust.sources.base import ContentItem +from piecrust.sources.generator import GeneratorSourceBase +from piecrust.sources.list import ListSource + + +logger = logging.getLogger(__name__) + + +_year_index = """--- +layout: %(template)s +--- +""" + + +class BlogArchivesSource(GeneratorSourceBase): + SOURCE_NAME = 'blog_archives' + DEFAULT_PIPELINE_NAME = 'blog_archives' + + def __init__(self, app, name, config): + super().__init__(app, name, config) + + tpl_name = config.get('template', '_year.html') + self._raw_item = _year_index % {'template': tpl_name} + + def getSupportedRouteParameters(self): + return [RouteParameter('year', RouteParameter.TYPE_INT4)] + + def findContentFromRoute(self, route_params): + year = route_params['year'] + return ContentItem( + '_index', + {'route_params': {'year': year}}) + + def prepareRenderContext(self, ctx): + ctx.pagination_source = self.inner_source + + route_params = 
ctx.page.source_metadata['route_params'] + year = route_params.get('year') + if year is None: + raise Exception( + "Can't find the archive year in the route metadata") + if type(year) is not int: + raise Exception( + "The route for generator '%s' should specify an integer " + "parameter for 'year'." % self.name) + + flt = PaginationFilter() + flt.addClause(IsFromYearFilterClause(year)) + ctx.pagination_filter = flt + + ctx.custom_data['year'] = year + + flt2 = PaginationFilter() + flt2.addClause(IsFromYearFilterClause(year)) + it = PageIterator(self.inner_source) + it._simpleNonSortedWrap(HardCodedFilterIterator, flt2) + it._wrapAsSort(DateSortIterator, reverse=False) + ctx.custom_data['archives'] = it + + ctx.custom_data['monthly_archives'] = _MonthlyArchiveData( + self.inner_source, year) + + +class IsFromYearFilterClause(IFilterClause): + def __init__(self, year): + self.year = year + + def pageMatches(self, fil, page): + return (page.datetime.year == self.year) + + +class _MonthlyArchiveData(collections.abc.Mapping): + def __init__(self, inner_source, year): + self._inner_source = inner_source + self._year = year + self._months = None + + def __iter__(self): + self._load() + return iter(self._months) + + def __len__(self): + self._load() + return len(self._months) + + def __getitem__(self, i): + self._load() + return self._months[i] + + def _load(self): + if self._months is not None: + return + + month_index = {} + for page in self._inner_source.getAllPages(): + if page.datetime.year != self._year: + continue + + month = page.datetime.month + + posts_this_month = month_index.get(month) + if posts_this_month is None: + posts_this_month = [] + month_index[month] = posts_this_month + posts_this_month.append(page.content_item) + + self._months = [] + for m, ptm in month_index.items(): + timestamp = time.mktime((self._year, m, 1, 0, 0, 0, 0, 0, -1)) + + it = PageIterator(ListSource(self._inner_source, ptm)) + it._wrapAsSort(DateSortIterator, reverse=False) + + 
self._months.append({ + 'timestamp': timestamp, + 'posts': it + }) + + +class BlogArchivesPipelineRecordEntry(PagePipelineRecordEntry): + def __init__(self): + super().__init__() + self.year = None + + +class BlogArchivesPipeline(ContentPipeline): + PIPELINE_NAME = 'blog_archives' + PASS_NUM = 10 + RECORD_ENTRY_CLASS = BlogArchivesPipelineRecordEntry + + def __init__(self, source, ctx): + if not isinstance(source, BlogArchivesSource): + raise Exception("The blog archives pipeline only supports blog " + "archives content sources.") + + super().__init__(source, ctx) + self.inner_source = source.inner_source + self._tpl_name = source.config['template'] + self._all_years = None + self._dirty_years = None + self._pagebaker = None + + def initialize(self): + self._pagebaker = PageBaker(self.app, + self.ctx.out_dir, + force=self.ctx.force) + self._pagebaker.startWriterQueue() + + def shutdown(self): + self._pagebaker.stopWriterQueue() + + def createJobs(self, ctx): + logger.debug("Caching template page for blog archives '%s'." % + self.inner_source.name) + page = self.app.getPage(self.source, ContentItem('_index', {})) + page._load() + + logger.debug("Building blog archives for: %s" % + self.inner_source.name) + self._buildDirtyYears(ctx) + logger.debug("Got %d dirty years out of %d." 
% + (len(self._dirty_years), len(self._all_years))) + + jobs = [] + rec_fac = self.createRecordEntry + current_record = ctx.current_record + + for y in self._dirty_years: + record_entry_spec = '_index[%04d]' % y + + jobs.append(create_job(self, '_index', + year=y, + record_entry_spec=record_entry_spec)) + + entry = rec_fac(record_entry_spec) + current_record.addEntry(entry) + + if len(jobs) > 0: + return jobs + return None + + def run(self, job, ctx, result): + year = job['year'] + content_item = ContentItem('_index', + {'year': year, + 'route_params': {'year': year}}) + page = Page(self.source, content_item) + + prev_entry = ctx.previous_entry + rdr_subs = self._pagebaker.bake(page, prev_entry) + + add_page_job_result(result) + result['subs'] = rdr_subs + result['year'] = page.source_metadata['year'] + + def handleJobResult(self, result, ctx): + existing = ctx.record_entry + merge_job_result_into_record_entry(existing, result) + existing.year = result['year'] + + def postJobRun(self, ctx): + # Create bake entries for the years that were *not* dirty. + # Otherwise, when checking for deleted pages, we would not find any + # outputs and would delete those files. + all_str_years = [str(y) for y in self._all_years] + for prev, cur in ctx.record_history.diffs: + if prev and not cur: + y = prev.year + if y in all_str_years: + logger.debug( + "Creating unbaked entry for year %s archive." % y) + cur.year = y + cur.out_paths = list(prev.out_paths) + cur.errors = list(prev.errors) + else: + logger.debug( + "No page references year %s anymore." 
% y) + + def _buildDirtyYears(self, ctx): + all_years = set() + dirty_years = set() + + record_name = get_record_name_for_source(self.inner_source) + current_records = ctx.record_histories.current + cur_rec = current_records.getRecord(record_name) + for cur_entry in cur_rec.getEntries(): + dt = datetime.datetime.fromtimestamp(cur_entry.timestamp) + all_years.add(dt.year) + if cur_entry.was_any_sub_baked: + dirty_years.add(dt.year) + + self._all_years = all_years + self._dirty_years = dirty_years + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/default.py --- a/piecrust/sources/default.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/sources/default.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,164 +1,51 @@ import os.path import logging -from piecrust import osutil from piecrust.routing import RouteParameter -from piecrust.sources.base import ( - PageFactory, PageSource, InvalidFileSystemEndpointError, - MODE_CREATING) +from piecrust.sources.base import REL_ASSETS, ContentItem +from piecrust.sources.fs import FSContentSource from piecrust.sources.interfaces import ( - IListableSource, IPreparingSource, IInteractiveSource, - InteractiveField) -from piecrust.sources.mixins import SimplePaginationSourceMixin + IPreparingSource, IInteractiveSource, InteractiveField) +from piecrust.sources.mixins import SimpleAssetsSubDirMixin +from piecrust.uriutil import uri_to_title logger = logging.getLogger(__name__) -def filter_page_dirname(d): - return not (d.startswith('.') or d.endswith('-assets')) - - -def filter_page_filename(f): - return (f[0] != '.' 
and # .DS_store and other crap - f[-1] != '~' and # Vim temp files and what-not - f not in ['Thumbs.db']) # Windows bullshit - - -class DefaultPageSource(PageSource, - IListableSource, IPreparingSource, IInteractiveSource, - SimplePaginationSourceMixin): +class DefaultContentSource(FSContentSource, + SimpleAssetsSubDirMixin, + IPreparingSource, IInteractiveSource): SOURCE_NAME = 'default' + DEFAULT_PIPELINE_NAME = 'page' def __init__(self, app, name, config): - super(DefaultPageSource, self).__init__(app, name, config) - self.fs_endpoint = config.get('fs_endpoint', name) - self.fs_endpoint_path = os.path.join(self.root_dir, self.fs_endpoint) - self.supported_extensions = list( - app.config.get('site/auto_formats').keys()) - self.default_auto_format = app.config.get('site/default_auto_format') - - def getSupportedRouteParameters(self): - return [ - RouteParameter('slug', RouteParameter.TYPE_PATH)] + super().__init__(app, name, config) - def buildPageFactories(self): - logger.debug("Scanning for pages in: %s" % self.fs_endpoint_path) - if not os.path.isdir(self.fs_endpoint_path): - if self.ignore_missing_dir: - return - raise InvalidFileSystemEndpointError(self.name, - self.fs_endpoint_path) - - for dirpath, dirnames, filenames in osutil.walk(self.fs_endpoint_path): - rel_dirpath = os.path.relpath(dirpath, self.fs_endpoint_path) - dirnames[:] = list(filter(filter_page_dirname, dirnames)) - for f in sorted(filter(filter_page_filename, filenames)): - fac_path = f - if rel_dirpath != '.': - fac_path = os.path.join(rel_dirpath, f) + config.setdefault('data_type', 'page_iterator') - slug = self._makeSlug(fac_path) - metadata = {'slug': slug} - fac_path = fac_path.replace('\\', '/') - self._populateMetadata(fac_path, metadata) - yield PageFactory(self, fac_path, metadata) + self.auto_formats = app.config.get('site/auto_formats') + self.default_auto_format = app.config.get('site/default_auto_format') + self.supported_extensions = list(self.auto_formats) - def 
buildPageFactory(self, path): - if not path.startswith(self.fs_endpoint_path): - raise Exception("Page path '%s' isn't inside '%s'." % ( - path, self.fs_enpoint_path)) - rel_path = path[len(self.fs_endpoint_path):].lstrip('\\/') - slug = self._makeSlug(rel_path) - metadata = {'slug': slug} - fac_path = rel_path.replace('\\', '/') - self._populateMetadata(fac_path, metadata) - return PageFactory(self, fac_path, metadata) - - def resolveRef(self, ref_path): - path = os.path.normpath( - os.path.join(self.fs_endpoint_path, ref_path.lstrip("\\/"))) - slug = self._makeSlug(ref_path) - metadata = {'slug': slug} - self._populateMetadata(ref_path, metadata) - return path, metadata - - def findPageFactory(self, metadata, mode): - uri_path = metadata.get('slug', '') - if not uri_path: - uri_path = '_index' - path = os.path.join(self.fs_endpoint_path, uri_path) - _, ext = os.path.splitext(path) + def _finalizeContent(self, parent_group, items, groups): + SimpleAssetsSubDirMixin._removeAssetGroups(self, groups) - if mode == MODE_CREATING: - if ext == '': - path = '%s.%s' % (path, self.default_auto_format) - rel_path = os.path.relpath(path, self.fs_endpoint_path) - rel_path = rel_path.replace('\\', '/') - self._populateMetadata(rel_path, metadata, mode) - return PageFactory(self, rel_path, metadata) - - if ext == '': - paths_to_check = [ - '%s.%s' % (path, e) - for e in self.supported_extensions] - else: - paths_to_check = [path] - for path in paths_to_check: - if os.path.isfile(path): - rel_path = os.path.relpath(path, self.fs_endpoint_path) - rel_path = rel_path.replace('\\', '/') - self._populateMetadata(rel_path, metadata, mode) - return PageFactory(self, rel_path, metadata) - - return None + def _createItemMetadata(self, path): + slug = self._makeSlug(path) + metadata = { + 'route_params': { + 'slug': slug + } + } + _, ext = os.path.splitext(path) + if ext: + fmt = self.auto_formats.get(ext.lstrip('.')) + if fmt: + metadata['config'] = {'format': fmt} + return metadata - 
def listPath(self, rel_path): - rel_path = rel_path.lstrip('\\/') - path = os.path.join(self.fs_endpoint_path, rel_path) - names = sorted(osutil.listdir(path)) - items = [] - for name in names: - if os.path.isdir(os.path.join(path, name)): - if filter_page_dirname(name): - rel_subdir = os.path.join(rel_path, name) - items.append((True, name, rel_subdir)) - else: - if filter_page_filename(name): - slug = self._makeSlug(os.path.join(rel_path, name)) - metadata = {'slug': slug} - - fac_path = name - if rel_path != '.': - fac_path = os.path.join(rel_path, name) - fac_path = fac_path.replace('\\', '/') - - self._populateMetadata(fac_path, metadata) - fac = PageFactory(self, fac_path, metadata) - - name, _ = os.path.splitext(name) - items.append((False, name, fac)) - return items - - def getDirpath(self, rel_path): - return os.path.dirname(rel_path) - - def getBasename(self, rel_path): - filename = os.path.basename(rel_path) - name, _ = os.path.splitext(filename) - return name - - def setupPrepareParser(self, parser, app): - parser.add_argument('uri', help='The URI for the new page.') - - def buildMetadata(self, args): - return {'slug': args.uri} - - def getInteractiveFields(self): - return [ - InteractiveField('slug', InteractiveField.TYPE_STRING, - 'new-page')] - - def _makeSlug(self, rel_path): + def _makeSlug(self, path): + rel_path = os.path.relpath(path, self.fs_endpoint_path) slug, ext = os.path.splitext(rel_path) slug = slug.replace('\\', '/') if ext.lstrip('.') not in self.supported_extensions: @@ -169,6 +56,54 @@ slug = '' return slug - def _populateMetadata(self, rel_path, metadata, mode=None): - pass + def getRelatedContents(self, item, relationship): + if relationship == REL_ASSETS: + return SimpleAssetsSubDirMixin._getRelatedAssetsContents( + self, item) + return FSContentSource.getRelatedContents(self, item, relationship) + + def getSupportedRouteParameters(self): + return [ + RouteParameter('slug', RouteParameter.TYPE_PATH)] + + def 
findContentFromRoute(self, route_params): + uri_path = route_params.get('slug', '') + if not uri_path: + uri_path = '_index' + path = os.path.join(self.fs_endpoint_path, uri_path) + _, ext = os.path.splitext(path) + if ext == '': + paths_to_check = [ + '%s.%s' % (path, e) + for e in self.supported_extensions] + else: + paths_to_check = [path] + for path in paths_to_check: + if os.path.isfile(path): + metadata = self._createItemMetadata(path) + return ContentItem(path, metadata) + return None + + def setupPrepareParser(self, parser, app): + parser.add_argument('slug', help='The slug for the new page.') + + def createContent(self, args): + slug = args.get('slug') + if not slug: + slug = '_index' + path = os.path.join(self.fs_endpoint_path, slug) + _, ext = os.path.splitext(path) + if ext == '': + path = '%s.%s' % (path, self.default_auto_format) + + metadata = self._createItemMetadata(path) + config = metadata.setdefault('config', {}) + config.update({'title': uri_to_title( + os.path.basename(metadata['slug']))}) + return ContentItem(path, metadata) + + def getInteractiveFields(self): + return [ + InteractiveField('slug', InteractiveField.TYPE_STRING, + 'new-page')] diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/fs.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/sources/fs.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,191 @@ +import os.path +import re +import glob +import fnmatch +import logging +from piecrust import osutil +from piecrust.routing import RouteParameter +from piecrust.sources.base import ( + ContentItem, ContentGroup, ContentSource, + REL_PARENT_GROUP, REL_LOGICAL_PARENT_ITEM, REL_LOGICAl_CHILD_GROUP) + + +logger = logging.getLogger(__name__) + + +class InvalidFileSystemEndpointError(Exception): + def __init__(self, source_name, fs_endpoint): + super(InvalidFileSystemEndpointError, self).__init__( + "Invalid file-system endpoint for source '%s': %s" % + (source_name, fs_endpoint)) + + +def _filter_crap_files(f): + return (f[-1] != 
'~' and # Vim temp files and what-not + f not in ['.DS_Store', 'Thumbs.db']) # OSX and Windows bullshit + + +class FSContentSourceBase(ContentSource): + """ Implements some basic stuff for a `ContentSource` that stores its + items as files on disk. + """ + def __init__(self, app, name, config): + super().__init__(app, name, config) + self.fs_endpoint = config.get('fs_endpoint', name) + self.fs_endpoint_path = os.path.join(self.root_dir, self.fs_endpoint) + + @property + def root_dir(self): + if self.is_theme_source: + return self.app.theme_dir + return self.app.root_dir + + def _checkFSEndpoint(self): + if not os.path.isdir(self.fs_endpoint_path): + if self.config.get('ignore_missing_dir'): + return False + raise InvalidFileSystemEndpointError(self.name, + self.fs_endpoint_path) + return True + + def openItem(self, item, mode='r', **kwargs): + for m in 'wxa+': + if m in mode: + # If opening the file for writing, let's make sure the + # directory exists. + dirname = os.path.dirname(item.spec) + if not os.path.exists(dirname): + os.makedirs(dirname, 0o755) + break + return open(item.spec, mode, **kwargs) + + def getItemMtime(self, item): + return os.path.getmtime(item.spec) + + def describe(self): + return {'endpoint_path': self.fs_endpoint_path} + + +class FSContentSource(FSContentSourceBase): + """ Implements a `ContentSource` that simply returns files on disk + under a given root directory. 
+ """ + SOURCE_NAME = 'fs' + + def __init__(self, app, name, config): + super().__init__(app, name, config) + + config.setdefault('data_type', 'asset_iterator') + + ig, ir = _parse_ignores(config.get('ignore')) + self._ignore_globs = ig + self._ignore_regexes = ir + + def getContents(self, group): + if not self._checkFSEndpoint(): + return None + + parent_path = self.fs_endpoint_path + if group is not None: + parent_path = group.spec + + names = filter(_filter_crap_files, osutil.listdir(parent_path)) + + final_names = [] + for name in names: + path = os.path.join(parent_path, name) + if not self._filterIgnored(path): + final_names.append(name) + + items = [] + groups = [] + for name in final_names: + path = os.path.join(parent_path, name) + if os.path.isdir(path): + metadata = self._createGroupMetadata(path) + groups.append(ContentGroup(path, metadata)) + else: + metadata = self._createItemMetadata(path) + items.append(ContentItem(path, metadata)) + self._finalizeContent(group, items, groups) + return items + groups + + def _filterIgnored(self, path): + rel_path = os.path.relpath(path, self.fs_endpoint_path) + for g in self._ignore_globs: + if fnmatch.fnmatch(rel_path, g): + return True + for r in self._ignore_regexes: + if r.search(g): + return True + return False + + def _createGroupMetadata(self, path): + return {} + + def _createItemMetadata(self, path): + return {} + + def _finalizeContent(self, parent_group, items, groups): + pass + + def findContentFromSpec(self, spec): + if os.path.isdir(spec): + metadata = self._createGroupMetadata(spec) + return ContentGroup(spec, metadata) + elif os.path.isfile(spec): + metadata = self._createItemMetadata(spec) + return ContentItem(spec, metadata) + return None + + def getRelatedContents(self, item, relationship): + if relationship == REL_PARENT_GROUP: + parent_dir = os.path.dirname(item.spec) + if len(parent_dir) >= len(self.fs_endpoint_path): + metadata = self._createGroupMetadata(parent_dir) + return 
ContentGroup(parent_dir, metadata) + + # Don't return a group for paths that are outside of our + # endpoint directory. + return None + + if relationship == REL_LOGICAL_PARENT_ITEM: + # If we want the logical parent item of a folder, we find a + # page file with the same name as the folder. + if not item.is_group: + raise ValueError() + parent_glob = item.spec.rstrip('/\\') + '.*' + for n in glob.iglob(parent_glob): + if os.path.isfile(n): + metadata = self._createItemMetadata(n) + return ContentItem(n, metadata) + return None + + if relationship == REL_LOGICAl_CHILD_GROUP: + # If we want the children items of an item, we look for + # a directory that has the same name as the item's file. + if item.is_group: + raise ValueError() + dir_path, _ = os.path.splitext(item.spec) + if os.path.isdir(dir_path): + metadata = self._createGroupMetadata(dir_path) + return ContentGroup(dir_path, metadata) + return None + + return None + + def getSupportedRouteParameters(self): + return [ + RouteParameter('path', RouteParameter.TYPE_PATH)] + + +def _parse_ignores(patterns): + globs = [] + regexes = [] + if patterns: + for pat in patterns: + if len(pat) > 2 and pat[0] == '/' and pat[-1] == '/': + regexes.append(re.compile(pat[1:-1])) + else: + globs.append(pat) + return globs, regexes diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/generator.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/sources/generator.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,36 @@ +import io +import time +from werkzeug.utils import cached_property +from piecrust.configuration import ConfigurationError +from piecrust.sources.base import ContentSource, GeneratedContentException + + +class GeneratorSourceBase(ContentSource): + def __init__(self, app, name, config): + super().__init__(app, name, config) + + source_name = config.get('source') + if source_name is None: + raise ConfigurationError( + "Taxonomy source '%s' requires an inner source." 
% name) + self._inner_source_name = source_name + + self._raw_item = '' + self._raw_item_time = time.time() + + @cached_property + def inner_source(self): + return self.app.getSource(self._inner_source_name) + + def getContents(self, group): + # Our content is procedurally generated from other content sources, + # so we really don't support listing anything here -- it would be + # typically quite costly. + raise GeneratedContentException() + + def openItem(self, item, mode='r', **kwargs): + return io.StringIO(self._raw_item) + + def getItemMtime(self, item): + return self._raw_item_time + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/interfaces.py --- a/piecrust/sources/interfaces.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/sources/interfaces.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,41 +1,3 @@ - - -class IPaginationSource(object): - """ Defines the interface for a source that can be used as the data - for an iterator or a pagination. - """ - def getItemsPerPage(self): - raise NotImplementedError() - - def getSourceIterator(self): - raise NotImplementedError() - - def getSorterIterator(self, it): - raise NotImplementedError() - - def getTailIterator(self, it): - raise NotImplementedError() - - def getPaginationFilter(self, page): - raise NotImplementedError() - - def getSettingAccessor(self): - raise NotImplementedError() - - -class IListableSource(object): - """ Defines the interface for a source that can be iterated on in a - hierarchical manner, for use with the `family` data endpoint. 
- """ - def listPath(self, rel_path): - raise NotImplementedError() - - def getDirpath(self, rel_path): - raise NotImplementedError() - - def getBasename(self, rel_path): - raise NotImplementedError() - class IPreparingSource(object): """ Defines the interface for a source whose pages can be created by the @@ -44,11 +6,13 @@ def setupPrepareParser(self, parser, app): raise NotImplementedError() - def buildMetadata(self, args): + def createContent(self, args): raise NotImplementedError() class InteractiveField(object): + """ A field to display in the administration web UI. + """ TYPE_STRING = 0 TYPE_INT = 1 @@ -59,6 +23,8 @@ class IInteractiveSource(object): + """ A content source that a user can interact with in the administration + web UI. + """ def getInteractiveFields(self): raise NotImplementedError() - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/list.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/sources/list.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,31 @@ +from piecrust.sources.base import ContentSource + + +class ListSource(ContentSource): + def __init__(self, inner_source, items): + super().__init__( + inner_source.app, inner_source.name, inner_source.config) + + self.inner_source = inner_source + self.items = items + + def openItem(self, item, mode='r', **kwargs): + return self.inner_source.openItem(item, mode, **kwargs) + + def getItemMtime(self, item): + return self.inner_source.getItemMtime(item) + + def getContents(self, group): + return self.items + + def getRelatedContents(self, item, relationship): + return self.inner_source.getRelatedContents(item, relationship) + + def findContentFromRoute(self, route_params): + # Can't find items... we could find stuff that's not in our list? 
+ raise NotImplementedError( + "The list source doesn't support finding items.") + + def getSupportedRouteParameters(self): + return self.inner_source.getSupportedRouteParameters() + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/mixins.py --- a/piecrust/sources/mixins.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/sources/mixins.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,165 +1,43 @@ -import os import os.path import logging -from piecrust.data.filters import PaginationFilter, page_value_accessor -from piecrust.data.paginationdata import PaginationData -from piecrust.sources.base import PageFactory -from piecrust.sources.interfaces import IPaginationSource, IListableSource -from piecrust.sources.pageref import PageRef +from piecrust import osutil +from piecrust.sources.base import ContentItem logger = logging.getLogger(__name__) - -class SourceFactoryIterator(object): - def __init__(self, source): - self.source = source - - # This is to permit recursive traversal of the - # iterator chain. It acts as the end. - self.it = None - - def __iter__(self): - return self.source.getPages() - - -class SourceFactoryWithoutGeneratorsIterator(object): - def __init__(self, source): - self.source = source - self._generator_pages = None - # See comment above. 
- self.it = None - - def __iter__(self): - self._cacheGeneratorPages() - for p in self.source.getPages(): - if p.rel_path in self._generator_pages: - continue - yield p - - def _cacheGeneratorPages(self): - if self._generator_pages is not None: - return - - app = self.source.app - self._generator_pages = set() - for src in app.sources: - for gen in app.generators: - for sn, rp in gen.page_ref.possible_split_ref_specs: - if sn == self.source.name: - self._generator_pages.add(rp) - - -class DateSortIterator(object): - def __init__(self, it, reverse=True): - self.it = it - self.reverse = reverse - - def __iter__(self): - return iter(sorted(self.it, - key=lambda x: x.datetime, reverse=self.reverse)) - - -class PaginationDataBuilderIterator(object): - def __init__(self, it): - self.it = it - - def __iter__(self): - for page in self.it: - if page is None: - yield None - else: - yield PaginationData(page) +assets_suffix = '-assets' -class SimplePaginationSourceMixin(IPaginationSource): - """ Implements the `IPaginationSource` interface in a standard way that - should fit most page sources. - """ - def getItemsPerPage(self): - return self.config['items_per_page'] - - def getSourceIterator(self): - if self.config.get('iteration_includes_generator_pages', False): - return SourceFactoryIterator(self) - return SourceFactoryWithoutGeneratorsIterator(self) - - def getSorterIterator(self, it): - return DateSortIterator(it) - - def getTailIterator(self, it): - return PaginationDataBuilderIterator(it) +class SimpleAssetsSubDirMixin: + """ A content source mixin for sources that are file-system-based, + and have item assets stored in a sub-folder that is named after + the item. 
- def getPaginationFilter(self, page): - conf = (page.config.get('items_filters') or - self.config.get('items_filters')) - if conf == 'none' or conf == 'nil' or conf == '': - conf = None - if conf is not None: - f = PaginationFilter(value_accessor=page_value_accessor) - f.addClausesFromConfig(conf) - return f - return None - - def getSettingAccessor(self): - return page_value_accessor - - -class SimpleListableSourceMixin(IListableSource): - """ Implements the `IListableSource` interface for sources that map to - simple file-system structures. + More specifically, assets are stored in a sub-folder named: + `-assets` """ - def listPath(self, rel_path): - rel_path = rel_path.lstrip('\\/') - path = self._getFullPath(rel_path) - names = self._sortFilenames(os.listdir(path)) + def _getRelatedAssetsContents(self, item): + spec_no_ext, _ = os.path.splitext(item.spec) + assets_dir = spec_no_ext + assets_suffix + try: + asset_files = list(osutil.listdir(assets_dir)) + except (OSError, FileNotFoundError): + return None - items = [] - for name in names: - if os.path.isdir(os.path.join(path, name)): - if self._filterPageDirname(name): - rel_subdir = os.path.join(rel_path, name) - items.append((True, name, rel_subdir)) - else: - if self._filterPageFilename(name): - slug = self._makeSlug(os.path.join(rel_path, name)) - metadata = {'slug': slug} - - fac_path = name - if rel_path != '.': - fac_path = os.path.join(rel_path, name) - fac_path = fac_path.replace('\\', '/') - - self._populateMetadata(fac_path, metadata) - fac = PageFactory(self, fac_path, metadata) - - name, _ = os.path.splitext(name) - items.append((False, name, fac)) - return items + assets = [] + for f in asset_files: + fpath = os.path.join(assets_dir, f) + name, _ = os.path.splitext(f) + assets.append(ContentItem( + fpath, + {'name': name, + 'filename': f, + '__is_asset': True})) + return assets - def getDirpath(self, rel_path): - return os.path.dirname(rel_path) - - def getBasename(self, rel_path): - filename = 
os.path.basename(rel_path) - name, _ = os.path.splitext(filename) - return name - - def _getFullPath(self, rel_path): - return os.path.join(self.fs_endpoint_path, rel_path) - - def _sortFilenames(self, names): - return sorted(names) - - def _filterPageDirname(self, name): - return True - - def _filterPageFilename(self, name): - return True - - def _makeSlug(self, rel_path): - return rel_path.replace('\\', '/') - - def _populateMetadata(self, rel_path, metadata, mode=None): - pass - + def _removeAssetGroups(self, groups): + asset_groups = [g for g in groups + if g.spec.endswith(assets_suffix)] + for g in asset_groups: + groups.remove(g) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/pageref.py --- a/piecrust/sources/pageref.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,137 +0,0 @@ -import re -import os.path -import copy -from piecrust.sources.base import PageFactory - - -page_ref_pattern = re.compile(r'(?P[\w]+)\:(?P.*?)(;|$)') - - -class PageNotFoundError(Exception): - pass - - -class PageRef(object): - """ A reference to a page, with support for looking a page in different - realms. 
- """ - _INDEX_NEEDS_LOADING = -2 - _INDEX_NOT_FOUND = -1 - - class _HitInfo(object): - def __init__(self, source_name, rel_path, path, metadata): - self.source_name = source_name - self.rel_path = rel_path - self.path = path - self.metadata = metadata - - def __init__(self, app, page_ref): - self.app = app - self._page_ref = page_ref - self._hits = None - self._first_valid_hit_index = self._INDEX_NEEDS_LOADING - self._exts = list(app.config.get('site/auto_formats').keys()) - - def __str__(self): - return self._page_ref - - @property - def exists(self): - try: - self._checkHits() - return True - except PageNotFoundError: - return False - - @property - def source_name(self): - return self._first_valid_hit.source_name - - @property - def source(self): - return self.app.getSource(self.source_name) - - @property - def rel_path(self): - return self._first_valid_hit.rel_path - - @property - def path(self): - return self._first_valid_hit.path - - @property - def metadata(self): - return self._first_valid_hit.metadata - - @property - def possible_ref_specs(self): - self._load() - return ['%s:%s' % (h.source_name, h.rel_path) for h in self._hits] - - @property - def possible_split_ref_specs(self): - self._load() - return [(h.source_name, h.rel_path) for h in self._hits] - - @property - def possible_paths(self): - self._load() - return [h.path for h in self._hits] - - def getFactory(self): - return PageFactory(self.source, self.rel_path, - copy.deepcopy(self.metadata)) - - @property - def _first_valid_hit(self): - self._checkHits() - return self._hits[self._first_valid_hit_index] - - def _load(self): - if self._hits is not None: - return - - self._hits = [] - - if self._page_ref is None: - self._first_valid_hit_index = self._INDEX_NOT_FOUND - return - - it = list(page_ref_pattern.finditer(self._page_ref)) - if len(it) == 0: - raise Exception("Invalid page ref: %s" % self._page_ref) - - for m in it: - source_name = m.group('src') - source = self.app.getSource(source_name) - 
if source is None: - raise Exception("No such source: %s" % source_name) - rel_path = m.group('path') - if '%ext%' in rel_path: - for e in self._exts: - cur_rel_path = rel_path.replace('%ext%', e) - path, metadata = source.resolveRef(cur_rel_path) - self._hits.append(self._HitInfo( - source_name, cur_rel_path, path, metadata)) - else: - path, metadata = source.resolveRef(rel_path) - self._hits.append( - self._HitInfo(source_name, rel_path, path, metadata)) - - def _checkHits(self): - if self._first_valid_hit_index >= 0: - return - - if self._first_valid_hit_index == self._INDEX_NEEDS_LOADING: - self._load() - self._first_valid_hit_index = self._INDEX_NOT_FOUND - for i, hit in enumerate(self._hits): - if os.path.isfile(hit.path): - self._first_valid_hit_index = i - break - - if self._first_valid_hit_index == self._INDEX_NOT_FOUND: - raise PageNotFoundError( - "No valid paths were found for page reference: %s" % - self._page_ref) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/posts.py --- a/piecrust/sources/posts.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/sources/posts.py Tue Nov 21 22:07:12 2017 -0800 @@ -5,80 +5,60 @@ import datetime from piecrust import osutil from piecrust.routing import RouteParameter -from piecrust.sources.base import ( - PageSource, InvalidFileSystemEndpointError, PageFactory, - MODE_CREATING, MODE_PARSING) +from piecrust.sources.base import REL_PARENT_GROUP, REL_ASSETS, ContentItem +from piecrust.sources.fs import ( + FSContentSource, InvalidFileSystemEndpointError) from piecrust.sources.interfaces import ( - IPreparingSource, IInteractiveSource, InteractiveField) -from piecrust.sources.mixins import SimplePaginationSourceMixin -from piecrust.uriutil import multi_replace + IPreparingSource, IInteractiveSource, InteractiveField) +from piecrust.sources.mixins import SimpleAssetsSubDirMixin +from piecrust.uriutil import uri_to_title logger = logging.getLogger(__name__) -class PostsSource(PageSource, IPreparingSource, 
IInteractiveSource, - SimplePaginationSourceMixin): +class PostsSource(FSContentSource, + SimpleAssetsSubDirMixin, + IPreparingSource, IInteractiveSource): PATH_FORMAT = None + DEFAULT_PIPELINE_NAME = 'page' def __init__(self, app, name, config): - PageSource.__init__(self, app, name, config) - self.fs_endpoint = config.get('fs_endpoint', name) - self.fs_endpoint_path = os.path.join(self.root_dir, self.fs_endpoint) - self.supported_extensions = list(app.config.get('site/auto_formats').keys()) + super().__init__(app, name, config) + + config.setdefault('data_type', 'page_iterator') + + self.auto_formats = app.config.get('site/auto_formats') self.default_auto_format = app.config.get('site/default_auto_format') - self._source_it_cache = None + self.supported_extensions = list(self.auto_formats) @property def path_format(self): return self.__class__.PATH_FORMAT - def resolveRef(self, ref_path): - path = os.path.normpath(os.path.join(self.fs_endpoint_path, ref_path)) - metadata = self._parseMetadataFromPath(ref_path) - return path, metadata + def _finalizeContent(self, groups): + SimpleAssetsSubDirMixin._removeAssetGroups(self, groups) - def getSupportedRouteParameters(self): - return [ - RouteParameter('slug', RouteParameter.TYPE_STRING), - RouteParameter('day', RouteParameter.TYPE_INT2), - RouteParameter('month', RouteParameter.TYPE_INT2), - RouteParameter('year', RouteParameter.TYPE_INT4)] + def getRelatedContents(self, item, relationship): + if relationship == REL_PARENT_GROUP: + # Logically speaking, all posts are always flattened. + return None + + if relationship == REL_ASSETS: + return SimpleAssetsSubDirMixin._getRelatedAssetsContents( + self, item) - def buildPageFactory(self, path): - if not path.startswith(self.fs_endpoint_path): - raise Exception("Page path '%s' isn't inside '%s'." 
% ( - path, self.fs_endpoint_path)) - rel_path = path[len(self.fs_endpoint_path):].lstrip('\\/') - pat = self.PATH_FORMAT % { - 'year': 'YEAR', - 'month': 'MONTH', - 'day': 'DAY', - 'slug': 'SLUG', - 'ext': 'EXT'} - pat = re.escape(pat) - pat = multi_replace(pat, { - 'YEAR': '(\d{4})', - 'MONTH': '(\d{2})', - 'DAY': '(\d{2})', - 'SLUG': '(.*)', - 'EXT': '(.*)'}) - m = re.match(pat, rel_path) - if m is None: - raise Exception("'%s' isn't a proper %s page path." % ( - rel_path, self.SOURCE_NAME)) - return self._makeFactory( - rel_path, - m.group(4), - int(m.group(1)), - int(m.group(2)), - int(m.group(3))) + return FSContentSource.getRelatedContents(self, item, relationship) + + def findContentFromSpec(self, spec): + metadata = self._parseMetadataFromPath(spec) + return ContentItem(spec, metadata) - def findPageFactory(self, metadata, mode): - year = metadata.get('year') - month = metadata.get('month') - day = metadata.get('day') - slug = metadata.get('slug') + def findContentFromRoute(self, route_params): + year = route_params.get('year') + month = route_params.get('month') + day = route_params.get('day') + slug = route_params.get('slug') try: if year is not None: @@ -90,20 +70,18 @@ except ValueError: return None - ext = metadata.get('ext') + ext = route_params.get('ext') if ext is None: if len(self.supported_extensions) == 1: ext = self.supported_extensions[0] - elif mode == MODE_CREATING and self.default_auto_format: - ext = self.default_auto_format replacements = { - 'year': '%04d' % year if year is not None else None, - 'month': '%02d' % month if month is not None else None, - 'day': '%02d' % day if day is not None else None, - 'slug': slug, - 'ext': ext - } + 'year': '%04d' % year if year is not None else None, + 'month': '%02d' % month if month is not None else None, + 'day': '%02d' % day if day is not None else None, + 'slug': slug, + 'ext': ext + } needs_recapture = False if year is None: needs_recapture = True @@ -121,57 +99,115 @@ needs_recapture = True 
replacements['ext'] = '*' path = os.path.normpath(os.path.join( - self.fs_endpoint_path, self.path_format % replacements)) + self.fs_endpoint_path, self.path_format % replacements)) if needs_recapture: - if mode == MODE_CREATING: - raise ValueError("Not enough information to find a post path.") possible_paths = osutil.glob(path) if len(possible_paths) != 1: return None path = possible_paths[0] - elif mode == MODE_PARSING and not os.path.isfile(path): + elif not os.path.isfile(path): return None - rel_path = os.path.relpath(path, self.fs_endpoint_path) - rel_path = rel_path.replace('\\', '/') - fac_metadata = self._parseMetadataFromPath(rel_path) - return PageFactory(self, rel_path, fac_metadata) + metadata = self._parseMetadataFromPath(path) + return ContentItem(path, metadata) + + def _parseMetadataFromPath(self, path): + regex_repl = { + 'year': '(?P\d{4})', + 'month': '(?P\d{2})', + 'day': '(?P\d{2})', + 'slug': '(?P.*)', + 'ext': '(?P.*)' + } + path_format_re = re.sub(r'([\-\.])', r'\\\1', self.path_format) + pattern = path_format_re % regex_repl + '$' + m = re.search(pattern, path.replace('\\', '/')) + if not m: + raise Exception("Expected to be able to match path with path " + "format: %s" % path) - def getSourceIterator(self): - if self._source_it_cache is None: - it = SimplePaginationSourceMixin.getSourceIterator(self) - self._source_it_cache = list(it) - return self._source_it_cache + year = int(m.group('year')) + month = int(m.group('month')) + day = int(m.group('day')) + timestamp = datetime.date(year, month, day) + metadata = { + 'route_params': { + 'year': year, + 'month': month, + 'day': day, + 'slug': m.group('slug') + }, + 'date': timestamp + } + return metadata + + def getSupportedRouteParameters(self): + return [ + RouteParameter('slug', RouteParameter.TYPE_STRING), + RouteParameter('day', RouteParameter.TYPE_INT2), + RouteParameter('month', RouteParameter.TYPE_INT2), + RouteParameter('year', RouteParameter.TYPE_INT4)] def setupPrepareParser(self, 
parser, app): parser.add_argument( - '-d', '--date', help="The date of the post, " - "in `year/month/day` format (defaults to today).") + '-d', '--date', + default='today', + help=("The date of the post, in `year/month/day` format " + "(defaults to today).")) parser.add_argument('slug', help="The URL slug for the new post.") - def buildMetadata(self, args): + def createContent(self, args): dt = datetime.date.today() - if args.date: - if args.date == 'today': + date = args.get('date') + if isinstance(date, str): + if date == 'today': pass # Keep the default we had. - elif args.date == 'tomorrow': + elif date == 'tomorrow': dt += datetime.timedelta(days=1) - elif args.date.startswith('+'): + elif date.startswith('+'): try: - dt += datetime.timedelta(days=int(args.date.lstrip('+'))) + dt += datetime.timedelta(days=int(date.lstrip('+'))) except ValueError: raise Exception("Date offsets must be numbers.") else: try: - year, month, day = [int(s) for s in args.date.split('/')] + year, month, day = [int(s) for s in date.split('/')] except ValueError: raise Exception("Dates must be of the form: " "YEAR/MONTH/DAY.") dt = datetime.date(year, month, day) + elif isinstance(date, datetime.datetime): + dt = datetime.date(date.year, date.month, date.day) + else: + try: + dt = datetime.date( + int(args.get('year')), + int(args.get('month')), + int(args.get('day'))) + except ValueError: + raise Exception("Incorrect year/month/day values: %s" % + args) + slug = args.get('slug') + if slug is None: + raise Exception("No slug in args: %s" % args) + slug, ext = os.path.splitext(slug) + if not ext: + ext = self.default_auto_format year, month, day = dt.year, dt.month, dt.day - return {'year': year, 'month': month, 'day': day, 'slug': args.slug} + tokens = { + 'slug': args.get('slug'), + 'ext': ext, + 'year': '%04d' % year, + 'month': '%02d' % month, + 'day': '%02d' % day + } + rel_path = self.path_format % tokens + path = os.path.join(self.fs_endpoint_path, rel_path) + metadata = 
self._parseMetadataFromPath(path) + metadata['config'] = {'title': uri_to_title(slug)} + return ContentItem(path, metadata) def getInteractiveFields(self): dt = datetime.date.today() @@ -185,96 +221,87 @@ if not os.path.isdir(self.fs_endpoint_path): if self.ignore_missing_dir: return False - raise InvalidFileSystemEndpointError(self.name, self.fs_endpoint_path) + raise InvalidFileSystemEndpointError(self.name, + self.fs_endpoint_path) return True - def _parseMetadataFromPath(self, path): - regex_repl = { - 'year': '(?P\d{4})', - 'month': '(?P\d{2})', - 'day': '(?P\d{2})', - 'slug': '(?P.*)', - 'ext': '(?P.*)' - } - path_format_re = re.sub(r'([\-\.])', r'\\\1', self.path_format) - pattern = path_format_re % regex_repl + '$' - m = re.search(pattern, path.replace('\\', '/')) - if not m: - raise Exception("Expected to be able to match path with path " - "format: %s" % path) - - year = int(m.group('year')) - month = int(m.group('month')) - day = int(m.group('day')) + def _makeContentItem(self, rel_path, slug, year, month, day): + path = os.path.join(self.fs_endpoint_path, rel_path) timestamp = datetime.date(year, month, day) metadata = { - 'year': year, - 'month': month, - 'day': day, - 'slug': m.group('slug'), - 'date': timestamp - } - return metadata - - def _makeFactory(self, path, slug, year, month, day): - path = path.replace('\\', '/') - timestamp = datetime.date(year, month, day) - metadata = { + 'route_params': { 'slug': slug, 'year': year, 'month': month, - 'day': day, - 'date': timestamp} - return PageFactory(self, path, metadata) + 'day': day}, + 'date': timestamp + } + + _, ext = os.path.splitext(path) + if ext: + fmt = self.auto_formats.get(ext.lstrip('.')) + if fmt: + metadata['config'] = {'format': fmt} + + return ContentItem(path, metadata) class FlatPostsSource(PostsSource): SOURCE_NAME = 'posts/flat' PATH_FORMAT = '%(year)s-%(month)s-%(day)s_%(slug)s.%(ext)s' + PATTERN = re.compile(r'(\d{4})-(\d{2})-(\d{2})_(.*)\.(\w+)$') def __init__(self, app, name, 
config): - super(FlatPostsSource, self).__init__(app, name, config) + super().__init__(app, name, config) - def buildPageFactories(self): - if not self._checkFsEndpointPath(): - return - logger.debug("Scanning for posts (flat) in: %s" % self.fs_endpoint_path) - pattern = re.compile(r'(\d{4})-(\d{2})-(\d{2})_(.*)\.(\w+)$') + def getContents(self, group): + if not self._checkFSEndpoint(): + return None + + logger.debug("Scanning for posts (flat) in: %s" % + self.fs_endpoint_path) + pattern = FlatPostsSource.PATTERN _, __, filenames = next(osutil.walk(self.fs_endpoint_path)) for f in filenames: match = pattern.match(f) if match is None: name, ext = os.path.splitext(f) - logger.warning("'%s' is not formatted as 'YYYY-MM-DD_slug-title.%s' " - "and will be ignored. Is that a typo?" % (f, ext)) + logger.warning( + "'%s' is not formatted as 'YYYY-MM-DD_slug-title.%s' " + "and will be ignored. Is that a typo?" % (f, ext)) continue - yield self._makeFactory( - f, - match.group(4), - int(match.group(1)), - int(match.group(2)), - int(match.group(3))) + yield self._makeContentItem( + f, + match.group(4), + int(match.group(1)), + int(match.group(2)), + int(match.group(3))) class ShallowPostsSource(PostsSource): SOURCE_NAME = 'posts/shallow' PATH_FORMAT = '%(year)s/%(month)s-%(day)s_%(slug)s.%(ext)s' + YEAR_PATTERN = re.compile(r'(\d{4})$') + FILE_PATTERN = re.compile(r'(\d{2})-(\d{2})_(.*)\.(\w+)$') def __init__(self, app, name, config): super(ShallowPostsSource, self).__init__(app, name, config) - def buildPageFactories(self): + def getContents(self, group): if not self._checkFsEndpointPath(): return - logger.debug("Scanning for posts (shallow) in: %s" % self.fs_endpoint_path) - year_pattern = re.compile(r'(\d{4})$') - file_pattern = re.compile(r'(\d{2})-(\d{2})_(.*)\.(\w+)$') + + logger.debug("Scanning for posts (shallow) in: %s" % + self.fs_endpoint_path) + year_pattern = ShallowPostsSource.YEAR_PATTERN + file_pattern = ShallowPostsSource.FILE_PATTERN _, year_dirs, __ = 
next(osutil.walk(self.fs_endpoint_path)) year_dirs = [d for d in year_dirs if year_pattern.match(d)] for yd in year_dirs: if year_pattern.match(yd) is None: - logger.warning("'%s' is not formatted as 'YYYY' and will be ignored. " - "Is that a typo?") + logger.warning( + "'%s' is not formatted as 'YYYY' and will be ignored. " + "Is that a typo?") continue year = int(yd) year_dir = os.path.join(self.fs_endpoint_path, yd) @@ -284,31 +311,37 @@ match = file_pattern.match(f) if match is None: name, ext = os.path.splitext(f) - logger.warning("'%s' is not formatted as 'MM-DD_slug-title.%s' " - "and will be ignored. Is that a typo?" % (f, ext)) + logger.warning( + "'%s' is not formatted as 'MM-DD_slug-title.%s' " + "and will be ignored. Is that a typo?" % (f, ext)) continue - yield self._makeFactory( - os.path.join(yd, f), - match.group(3), - year, - int(match.group(1)), - int(match.group(2))) + yield self._makeContentItem( + os.path.join(yd, f), + match.group(3), + year, + int(match.group(1)), + int(match.group(2))) class HierarchyPostsSource(PostsSource): SOURCE_NAME = 'posts/hierarchy' PATH_FORMAT = '%(year)s/%(month)s/%(day)s_%(slug)s.%(ext)s' + YEAR_PATTERN = re.compile(r'(\d{4})$') + MONTH_PATTERN = re.compile(r'(\d{2})$') + FILE_PATTERN = re.compile(r'(\d{2})_(.*)\.(\w+)$') def __init__(self, app, name, config): super(HierarchyPostsSource, self).__init__(app, name, config) - def buildPageFactories(self): + def getContents(self, group): if not self._checkFsEndpointPath(): return - logger.debug("Scanning for posts (hierarchy) in: %s" % self.fs_endpoint_path) - year_pattern = re.compile(r'(\d{4})$') - month_pattern = re.compile(r'(\d{2})$') - file_pattern = re.compile(r'(\d{2})_(.*)\.(\w+)$') + + logger.debug("Scanning for posts (hierarchy) in: %s" % + self.fs_endpoint_path) + year_pattern = HierarchyPostsSource.YEAR_PATTERN + month_pattern = HierarchyPostsSource.MONTH_PATTERN + file_pattern = HierarchyPostsSource.FILE_PATTERN _, year_dirs, __ = 
next(osutil.walk(self.fs_endpoint_path)) year_dirs = [d for d in year_dirs if year_pattern.match(d)] for yd in year_dirs: @@ -326,14 +359,15 @@ match = file_pattern.match(f) if match is None: name, ext = os.path.splitext(f) - logger.warning("'%s' is not formatted as 'DD_slug-title.%s' " - "and will be ignored. Is that a typo?" % (f, ext)) + logger.warning( + "'%s' is not formatted as 'DD_slug-title.%s' " + "and will be ignored. Is that a typo?" % (f, ext)) continue rel_name = os.path.join(yd, md, f) - yield self._makeFactory( - rel_name, - match.group(2), - year, - month, - int(match.group(1))) + yield self._makeContentItem( + rel_name, + match.group(2), + year, + month, + int(match.group(1))) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/prose.py --- a/piecrust/sources/prose.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/sources/prose.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,33 +1,28 @@ -import os -import os.path import copy import logging -from piecrust.sources.base import MODE_CREATING, MODE_PARSING -from piecrust.sources.default import DefaultPageSource +from piecrust.sources.default import DefaultContentSource logger = logging.getLogger(__name__) -class ProseSource(DefaultPageSource): +class ProseSource(DefaultContentSource): SOURCE_NAME = 'prose' def __init__(self, app, name, config): - super(ProseSource, self).__init__(app, name, config) + super().__init__(app, name, config) self.config_recipe = config.get('config', {}) - def _populateMetadata(self, rel_path, metadata, mode=None): - metadata['config'] = self._makeConfig(rel_path, mode) + def _doCreateItemMetadata(self, path): + metadata = super()._doCreateItemMetadata(path) + config = metadata.setdefault('config', {}) + config.update(self._makeConfig(path)) + return metadata - def _makeConfig(self, rel_path, mode): + def _makeConfig(self, path): c = copy.deepcopy(self.config_recipe) - if c.get('title') == '%first_line%' and mode != MODE_CREATING: - path = os.path.join(self.fs_endpoint_path, 
rel_path) - try: - c['title'] = get_first_line(path) - except IOError: - if mode == MODE_PARSING: - raise + if c.get('title') == '%first_line%': + c['title'] = get_first_line(path) return c diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/sources/taxonomy.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/sources/taxonomy.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,530 @@ +import re +import copy +import logging +import unidecode +from piecrust.configuration import ConfigurationError +from piecrust.data.filters import ( + PaginationFilter, SettingFilterClause) +from piecrust.page import Page +from piecrust.pipelines._pagebaker import PageBaker +from piecrust.pipelines._pagerecords import ( + PagePipelineRecordEntry, + add_page_job_result, merge_job_result_into_record_entry) +from piecrust.pipelines.base import ( + ContentPipeline, get_record_name_for_source, + create_job, content_item_from_job) +from piecrust.pipelines.records import RecordHistory +from piecrust.routing import RouteParameter +from piecrust.sources.base import ContentItem +from piecrust.sources.generator import GeneratorSourceBase + + +logger = logging.getLogger(__name__) + + +SLUGIFY_ENCODE = 1 +SLUGIFY_TRANSLITERATE = 2 +SLUGIFY_LOWERCASE = 4 +SLUGIFY_DOT_TO_DASH = 8 +SLUGIFY_SPACE_TO_DASH = 16 + + +re_first_dot_to_dash = re.compile(r'^\.+') +re_dot_to_dash = re.compile(r'\.+') +re_space_to_dash = re.compile(r'\s+') + + +class Taxonomy(object): + """ Describes a taxonomy. 
+ """ + def __init__(self, name, config): + self.name = name + self.config = config + self.term_name = config.get('term', name) + self.is_multiple = bool(config.get('multiple', False)) + self.separator = config.get('separator', '/') + self.page_ref = config.get('page') + + @property + def setting_name(self): + if self.is_multiple: + return self.name + return self.term_name + + +_taxonomy_index = """--- +layout: %(template)s +--- +""" + + +class TaxonomySource(GeneratorSourceBase): + """ A content source that generates taxonomy listing pages. + """ + SOURCE_NAME = 'taxonomy' + DEFAULT_PIPELINE_NAME = 'taxonomy' + + def __init__(self, app, name, config): + super().__init__(app, name, config) + + tax_name = config.get('taxonomy') + if tax_name is None: + raise ConfigurationError( + "Taxonomy source '%s' requires a taxonomy name." % name) + self.taxonomy = _get_taxonomy(app, tax_name) + + sm = config.get('slugify_mode') + self.slugifier = _get_slugifier(app, self.taxonomy, sm) + + tpl_name = config.get('template', '_%s.html' % tax_name) + self._raw_item = _taxonomy_index % {'template': tpl_name} + + def getSupportedRouteParameters(self): + name = self.taxonomy.term_name + param_type = (RouteParameter.TYPE_PATH if self.taxonomy.is_multiple + else RouteParameter.TYPE_STRING) + return [RouteParameter(name, param_type, + variadic=self.taxonomy.is_multiple)] + + def findContentFromRoute(self, route_params): + slugified_term = route_params[self.taxonomy.term_name] + spec = '_index' + metadata = {'term': slugified_term, + 'route_params': { + self.taxonomy.term_name: slugified_term} + } + return ContentItem(spec, metadata) + + def slugify(self, term): + return self.slugifier.slugify(term) + + def slugifyMultiple(self, terms): + return self.slugifier.slugifyMultiple(terms) + + def prepareRenderContext(self, ctx): + # Set the pagination source as the source we're generating for. + ctx.pagination_source = self.inner_source + + # Get the taxonomy terms from the route metadata... 
this can come from + # the browser's URL (while serving) or from the baking (see `bake` + # method below). In both cases, we expect to have the *slugified* + # version of the term, because we're going to set a filter that also + # slugifies the terms found on each page. + # + # This is because: + # * while serving, we get everything from the request URL, so we only + # have the slugified version. + # * if 2 slightly different terms "collide" into the same slugified + # term, we'll get a merge of the 2 on the listing page, which is + # what the user expects. + # + route_params = ctx.page.source_metadata['route_params'] + tax_terms, is_combination = self._getTaxonomyTerms(route_params) + self._setTaxonomyFilter(ctx, tax_terms, is_combination) + + # Add some custom data for rendering. + ctx.custom_data.update({ + self.taxonomy.term_name: tax_terms, + 'is_multiple_%s' % self.taxonomy.term_name: is_combination}) + # Add some "plural" version of the term... so for instance, if this + # is the "tags" taxonomy, "tag" will have one term most of the time, + # except when it's a combination. Here, we add "tags" as something that + # is always a tuple, even when it's not a combination. + if (self.taxonomy.is_multiple and + self.taxonomy.name != self.taxonomy.term_name): + mult_val = tax_terms + if not is_combination: + mult_val = (mult_val,) + ctx.custom_data[self.taxonomy.name] = mult_val + + def _getTaxonomyTerms(self, route_params): + # Get the individual slugified terms from the route metadata. + all_values = route_params.get(self.taxonomy.term_name) + if all_values is None: + raise Exception("'%s' values couldn't be found in route metadata" % + self.taxonomy.term_name) + + # If it's a "multiple" taxonomy, we need to potentially split the + # route value into the individual terms (_e.g._ when listing all pages + # that have 2 given tags, we need to get each of those 2 tags). 
+ if self.taxonomy.is_multiple: + sep = self.taxonomy.separator + if sep in all_values: + return tuple(all_values.split(sep)), True + # Not a "multiple" taxonomy, so there's only the one value. + return all_values, False + + def _setTaxonomyFilter(self, ctx, term_value, is_combination): + # Set up the filter that will check the pages' terms. + flt = PaginationFilter() + flt.addClause(HasTaxonomyTermsFilterClause( + self.taxonomy, self.slugifier.mode, term_value, is_combination)) + ctx.pagination_filter = flt + + def onRouteFunctionUsed(self, route_params): + # Get the values, and slugify them appropriately. + # If this is a "multiple" taxonomy, `values` will be a tuple of + # terms. If not, `values` will just be a term. + values = route_params[self.taxonomy.term_name] + tax_is_multiple = self.taxonomy.is_multiple + if tax_is_multiple: + slugified_values = self.slugifyMultiple((str(v) for v in values)) + route_val = self.taxonomy.separator.join(slugified_values) + else: + slugified_values = self.slugify(str(values)) + route_val = slugified_values + + # We need to register this use of a taxonomy term. + # Because the render info gets serialized across bake worker + # processes, we can only use basic JSON-able structures, which + # excludes `set`... hence the awkward use of `list`. + # Also, note that the tuples we're putting in there will be + # transformed into lists so we'll have to convert back. + rcs = self.app.env.render_ctx_stack + ri = rcs.current_ctx.render_info + utt = ri.get('used_taxonomy_terms') + if utt is None: + ri['used_taxonomy_terms'] = [slugified_values] + else: + if slugified_values not in utt: + utt.append(slugified_values) + + # Put the slugified values in the route metadata so they're used to + # generate the URL. 
+ route_params[self.taxonomy.term_name] = route_val + + +class HasTaxonomyTermsFilterClause(SettingFilterClause): + def __init__(self, taxonomy, slugify_mode, value, is_combination): + super().__init__(taxonomy.setting_name, value) + self._taxonomy = taxonomy + self._is_combination = is_combination + self._slugifier = _Slugifier(taxonomy, slugify_mode) + if taxonomy.is_multiple: + self.pageMatches = self._pageMatchesAny + else: + self.pageMatches = self._pageMatchesSingle + + def _pageMatchesAny(self, fil, page): + # Multiple taxonomy, i.e. it supports multiple terms, like tags. + page_values = page.config.get(self.name) + if page_values is None or not isinstance(page_values, list): + return False + + page_set = set(map(self._slugifier.slugify, page_values)) + if self._is_combination: + # Multiple taxonomy, and multiple terms to match. Check that + # the ones to match are all in the page's terms. + value_set = set(self.value) + return value_set.issubset(page_set) + else: + # Multiple taxonomy, one term to match. + return self.value in page_set + + def _pageMatchesSingle(self, fil, page): + # Single taxonomy. Just compare the values. 
+ page_value = page.config.get(self.name) + if page_value is None: + return False + page_value = self._slugifier.slugify(page_value) + return page_value == self.value + + +def _get_taxonomy(app, tax_name): + tax_config = app.config.get('site/taxonomies/' + tax_name) + if tax_config is None: + raise ConfigurationError("No such taxonomy: %s" % tax_name) + return Taxonomy(tax_name, tax_config) + + +def _get_slugifier(app, taxonomy, slugify_mode=None): + if slugify_mode is None: + slugify_mode = app.config.get('site/slugify_mode', 'encode') + sm = _parse_slugify_mode(slugify_mode) + return _Slugifier(taxonomy, sm) + + +class TaxonomyPipelineRecordEntry(PagePipelineRecordEntry): + def __init__(self): + super().__init__() + self.term = None + + +class TaxonomyPipeline(ContentPipeline): + PIPELINE_NAME = 'taxonomy' + PASS_NUM = 10 + RECORD_ENTRY_CLASS = TaxonomyPipelineRecordEntry + + def __init__(self, source, ctx): + if not isinstance(source, TaxonomySource): + raise Exception("The taxonomy pipeline only supports taxonomy " + "content sources.") + + super().__init__(source, ctx) + self.inner_source = source.inner_source + self.taxonomy = source.taxonomy + self.slugifier = source.slugifier + self._tpl_name = source.config['template'] + self._analyzer = None + self._pagebaker = None + + def initialize(self): + self._pagebaker = PageBaker(self.app, + self.ctx.out_dir, + force=self.ctx.force) + self._pagebaker.startWriterQueue() + + def shutdown(self): + self._pagebaker.stopWriterQueue() + + def createJobs(self, ctx): + logger.debug("Caching template page for taxonomy '%s'." % + self.taxonomy.name) + page = self.app.getPage(self.source, ContentItem('_index', {})) + page._load() + + logger.debug("Building '%s' taxonomy pages for source: %s" % + (self.taxonomy.name, self.inner_source.name)) + self._analyzer = _TaxonomyTermsAnalyzer(self, ctx.record_histories) + self._analyzer.analyze() + + logger.debug("Queuing %d '%s' jobs." 
% + (len(self._analyzer.dirty_slugified_terms), + self.taxonomy.name)) + jobs = [] + rec_fac = self.createRecordEntry + current_record = ctx.current_record + + for slugified_term in self._analyzer.dirty_slugified_terms: + item_spec = '_index' + record_entry_spec = '_index[%s]' % slugified_term + + jobs.append(create_job(self, item_spec, + term=slugified_term, + record_entry_spec=record_entry_spec)) + + entry = rec_fac(record_entry_spec) + current_record.addEntry(entry) + + if len(jobs) > 0: + return jobs + return None + + def run(self, job, ctx, result): + term = job['term'] + content_item = ContentItem('_index', + {'term': term, + 'route_params': { + self.taxonomy.term_name: term} + }) + page = Page(self.source, content_item) + + logger.debug("Rendering '%s' page: %s" % + (self.taxonomy.name, page.source_metadata['term'])) + prev_entry = ctx.previous_entry + rdr_subs = self._pagebaker.bake(page, prev_entry) + + add_page_job_result(result) + result['subs'] = rdr_subs + result['term'] = page.source_metadata['term'] + + def handleJobResult(self, result, ctx): + existing = ctx.record_entry + merge_job_result_into_record_entry(existing, result) + existing.term = result['term'] + + def postJobRun(self, ctx): + # We create bake entries for all the terms that were *not* dirty. + # This is because otherwise, on the next incremental bake, we wouldn't + # find any entry for those things, and figure that we need to delete + # their outputs. + analyzer = self._analyzer + record = ctx.record_history.current + for prev, cur in ctx.record_history.diffs: + # Only consider entries that don't have any current version + # (i.e. they weren't baked just now). 
+ if prev and not cur: + t = prev.term + if analyzer.isKnownSlugifiedTerm(t): + logger.debug("Creating unbaked entry for '%s' term: %s" % + (self.taxonomy.name, t)) + cur = copy.deepcopy(prev) + cur.flags = \ + PagePipelineRecordEntry.FLAG_COLLAPSED_FROM_LAST_RUN + record.addEntry(cur) + else: + logger.debug("Term '%s' in '%s' isn't used anymore." % + (t, self.taxonomy.name)) + + +class _TaxonomyTermsAnalyzer(object): + def __init__(self, pipeline, record_histories): + self.pipeline = pipeline + self.record_histories = record_histories + self._all_terms = {} + self._single_dirty_slugified_terms = set() + self._all_dirty_slugified_terms = None + + @property + def dirty_slugified_terms(self): + """ Returns the slugified terms that have been 'dirtied' during + this bake. + """ + return self._all_dirty_slugified_terms + + def isKnownSlugifiedTerm(self, term): + """ Returns whether the given slugified term has been seen during + this bake. + """ + return term in self._all_terms + + def analyze(self): + # Build the list of terms for our taxonomy, and figure out which ones + # are 'dirty' for the current bake. + # + # Remember all terms used. + source = self.pipeline.inner_source + taxonomy = self.pipeline.taxonomy + slugifier = self.pipeline.slugifier + + record_name = get_record_name_for_source(source) + current_records = self.record_histories.current + cur_rec = current_records.getRecord(record_name) + for cur_entry in cur_rec.getEntries(): + if not cur_entry.was_overriden: + cur_terms = cur_entry.config.get(taxonomy.setting_name) + if cur_terms: + if not taxonomy.is_multiple: + self._addTerm( + slugifier, cur_entry.item_spec, cur_terms) + else: + self._addTerms( + slugifier, cur_entry.item_spec, cur_terms) + + # Re-bake all taxonomy terms that include new or changed pages, by + # marking them as 'dirty'. 
+ history = self.record_histories.getHistory(record_name).copy() + history.build() + for prev_entry, cur_entry in history.diffs: + entries = [cur_entry] + if prev_entry: + entries.append(prev_entry) + + for e in entries: + if e and e.was_any_sub_baked: + entry_terms = e.config.get(taxonomy.setting_name) + if entry_terms: + if not taxonomy.is_multiple: + self._single_dirty_slugified_terms.add( + slugifier.slugify(entry_terms)) + else: + self._single_dirty_slugified_terms.update( + (slugifier.slugify(t) + for t in entry_terms)) + + self._all_dirty_slugified_terms = list( + self._single_dirty_slugified_terms) + logger.debug("Gathered %d dirty taxonomy terms", + len(self._all_dirty_slugified_terms)) + + # Re-bake the combination pages for terms that are 'dirty'. + # We make all terms into tuple, even those that are not actual + # combinations, so that we have less things to test further down the + # line. + # + # Add the combinations to that list. We get those combinations from + # wherever combinations were used, so they're coming from the + # `onRouteFunctionUsed` method. And because combinations can be used + # by any page in the website (anywhere someone can ask for an URL + # to the combination page), it means we check all the records, not + # just the record for our source. + if taxonomy.is_multiple: + known_combinations = set() + for rec in current_records.records: + # Cheap way to test if a record contains entries that + # are sub-types of a page entry: test the first one. 
+ first_entry = next(iter(rec.getEntries()), None) + if (first_entry is None or + not isinstance(first_entry, PagePipelineRecordEntry)): + continue + + for cur_entry in rec.getEntries(): + used_terms = _get_all_entry_taxonomy_terms(cur_entry) + for terms in used_terms: + if len(terms) > 1: + known_combinations.add(terms) + + dcc = 0 + for terms in known_combinations: + if not self._single_dirty_slugified_terms.isdisjoint( + set(terms)): + self._all_dirty_slugified_terms.append( + taxonomy.separator.join(terms)) + dcc += 1 + logger.debug("Gathered %d term combinations, with %d dirty." % + (len(known_combinations), dcc)) + + def _addTerms(self, slugifier, item_spec, terms): + for t in terms: + self._addTerm(slugifier, item_spec, t) + + def _addTerm(self, slugifier, item_spec, term): + st = slugifier.slugify(term) + orig_terms = self._all_terms.setdefault(st, []) + if orig_terms and orig_terms[0] != term: + logger.warning( + "Term '%s' in '%s' is slugified to '%s' which conflicts with " + "previously existing '%s'. The two will be merged." 
% + (term, item_spec, st, orig_terms[0])) + orig_terms.append(term) + + +def _get_all_entry_taxonomy_terms(entry): + res = set() + for o in entry.subs: + pinfo = o['render_info'] + terms = pinfo.get('used_taxonomy_terms') + if terms: + res |= set([tuple(t) for t in terms]) + return res + + +class _Slugifier(object): + def __init__(self, taxonomy, mode): + self.taxonomy = taxonomy + self.mode = mode + + def slugifyMultiple(self, terms): + return tuple(map(self.slugify, terms)) + + def slugify(self, term): + if self.mode & SLUGIFY_TRANSLITERATE: + term = unidecode.unidecode(term) + if self.mode & SLUGIFY_LOWERCASE: + term = term.lower() + if self.mode & SLUGIFY_DOT_TO_DASH: + term = re_first_dot_to_dash.sub('', term) + term = re_dot_to_dash.sub('-', term) + if self.mode & SLUGIFY_SPACE_TO_DASH: + term = re_space_to_dash.sub('-', term) + return term + + +def _parse_slugify_mode(value): + mapping = { + 'encode': SLUGIFY_ENCODE, + 'transliterate': SLUGIFY_TRANSLITERATE, + 'lowercase': SLUGIFY_LOWERCASE, + 'dot_to_dash': SLUGIFY_DOT_TO_DASH, + 'space_to_dash': SLUGIFY_SPACE_TO_DASH} + mode = 0 + for v in value.split(','): + f = mapping.get(v.strip()) + if f is None: + if v == 'iconv': + raise Exception("'iconv' is not supported as a slugify mode " + "in PieCrust2. 
Use 'transliterate'.") + raise Exception("Unknown slugify flag: %s" % v) + mode |= f + return mode + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/templating/base.py --- a/piecrust/templating/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/templating/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -27,7 +27,7 @@ def initialize(self, app): self.app = app - def renderSegmentPart(self, path, seg_part, data): + def renderSegment(self, path, segment, data): raise NotImplementedError() def renderFile(self, paths, data): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/templating/jinja/environment.py --- a/piecrust/templating/jinja/environment.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/templating/jinja/environment.py Tue Nov 21 22:07:12 2017 -0800 @@ -2,6 +2,7 @@ import time import email.utils import hashlib +import logging import strict_rfc3339 from jinja2 import Environment from .extensions import get_highlight_css @@ -10,18 +11,23 @@ from piecrust.uriutil import multi_replace +logger = logging.getLogger(__name__) + + class PieCrustEnvironment(Environment): def __init__(self, app, *args, **kwargs): self.app = app # Before we create the base Environement, let's figure out the options # we want to pass to it. - twig_compatibility_mode = app.config.get('jinja/twig_compatibility') - + # # Disable auto-reload when we're baking. if app.config.get('baker/is_baking'): kwargs.setdefault('auto_reload', False) + # Don't unload templates from the cache. + kwargs.setdefault('cache_size', -1) + # Let the user override most Jinja options via the site config. for name in ['block_start_string', 'block_end_string', 'variable_start_string', 'variable_end_string', @@ -33,9 +39,15 @@ if val is not None: kwargs.setdefault(name, val) - # Twig trims blocks. - if twig_compatibility_mode is True: - kwargs['trim_blocks'] = True + # Undefined behaviour. 
+ undef = app.config.get('jinja/undefined') + if undef == 'logging': + from jinja2 import make_logging_undefined + kwargs.setdefault('undefined', + make_logging_undefined(logger)) + elif undef == 'strict': + from jinja2 import StrictUndefined + kwargs.setdefault('undefined', StrictUndefined) # All good! Create the Environment. super(PieCrustEnvironment, self).__init__(*args, **kwargs) @@ -64,18 +76,14 @@ 'emaildate': get_email_date, 'date': get_date}) - # Backwards compatibility with Twig. - if twig_compatibility_mode is True: - self.filters['raw'] = self.filters['safe'] - self.globals['pcfail'] = raise_exception + self.filters['raw'] = self.filters['safe'] def _paginate(self, value, items_per_page=5): - cpi = self.app.env.exec_info_stack.current_page_info - if cpi is None or cpi.page is None or cpi.render_ctx is None: + ctx = self.app.env.render_ctx_stack.current_ctx + if ctx is None or ctx.page is None: raise Exception("Can't paginate when no page has been pushed " "on the execution stack.") - return Paginator(cpi.page, value, - page_num=cpi.render_ctx.page_num, + return Paginator(value, ctx.page, ctx.sub_num, items_per_page=items_per_page) def _formatWith(self, value, format_name): diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/templating/jinja/extensions.py --- a/piecrust/templating/jinja/extensions.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/templating/jinja/extensions.py Tue Nov 21 22:07:12 2017 -0800 @@ -2,9 +2,6 @@ from jinja2.lexer import Token, describe_token from jinja2.nodes import CallBlock, Const from compressinja.html import HtmlCompressor, StreamProcessContext -from pygments import highlight -from pygments.formatters import HtmlFormatter -from pygments.lexers import get_lexer_by_name, guess_lexer from piecrust.rendering import format_text @@ -18,9 +15,14 @@ lineno = next(parser.stream).lineno args = [parser.parse_expression()] body = parser.parse_statements(['name:endpcformat'], drop_needle=True) - return CallBlock(self.call_method('_format', 
args), + return CallBlock(self.call_method('_formatTimed', args), [], [], body).set_lineno(lineno) + def _formatTimed(self, format_name, caller=None): + with self.environment.app.env.stats.timerScope( + 'JinjaTemplateEngine_extensions'): + return self._format(format_name, caller) + def _format(self, format_name, caller=None): body = caller() text = format_text(self.environment.app, @@ -60,11 +62,22 @@ body = parser.parse_statements(['name:endhighlight', 'name:endgeshi'], drop_needle=True) - return CallBlock(self.call_method('_highlight', args, kwargs), + return CallBlock(self.call_method('_highlightTimed', args, kwargs), [], [], body).set_lineno(lineno) + def _highlightTimed(self, lang, line_numbers=False, use_classes=False, + css_class=None, css_id=None, caller=None): + with self.environment.app.env.stats.timerScope( + 'JinjaTemplateEngine_extensions'): + return self._highlight(lang, line_numbers, use_classes, + css_class, css_id, caller) + def _highlight(self, lang, line_numbers=False, use_classes=False, css_class=None, css_id=None, caller=None): + from pygments import highlight + from pygments.formatters import HtmlFormatter + from pygments.lexers import get_lexer_by_name, guess_lexer + # Try to be mostly compatible with Jinja2-highlight's settings. body = caller() @@ -90,6 +103,7 @@ def get_highlight_css(style_name='default', class_name='.highlight'): + from pygments.formatters import HtmlFormatter return HtmlFormatter(style=style_name).get_style_defs(class_name) @@ -118,34 +132,34 @@ body = parser.parse_statements(['name:endpccache', 'name:endcache'], drop_needle=True) - # now return a `CallBlock` node that calls our _cache_support + # now return a `CallBlock` node that calls our _renderCache # helper method on this extension. 
- return CallBlock(self.call_method('_cache_support', args), + return CallBlock(self.call_method('_renderCacheTimed', args), [], [], body).set_lineno(lineno) - def _cache_support(self, name, caller): + def _renderCacheTimed(self, name, caller): + with self.environment.app.env.stats.timerScope( + 'JinjaTemplateEngine_extensions'): + return self._renderCache(name, caller) + + def _renderCache(self, name, caller): key = self.environment.piecrust_cache_prefix + name - exc_stack = self.environment.app.env.exec_info_stack - render_ctx = exc_stack.current_page_info.render_ctx - rdr_pass = render_ctx.current_pass_info + rcs = self.environment.app.env.render_ctx_stack + ctx = rcs.current_ctx # try to load the block from the cache # if there is no fragment in the cache, render it and store # it in the cache. pair = self.environment.piecrust_cache.get(key) if pair is not None: - rdr_pass.used_source_names.update(pair[1]) + for usn in pair[1]: + ctx.addUsedSource(usn) return pair[0] - pair = self.environment.piecrust_cache.get(key) - if pair is not None: - rdr_pass.used_source_names.update(pair[1]) - return pair[0] - - prev_used = rdr_pass.used_source_names.copy() + prev_used = set(ctx.current_used_source_names) rv = caller() - after_used = rdr_pass.used_source_names.copy() + after_used = set(ctx.current_used_source_names) used_delta = after_used.difference(prev_used) self.environment.piecrust_cache[key] = (rv, used_delta) return rv diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/templating/jinja/loader.py --- a/piecrust/templating/jinja/loader.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/templating/jinja/loader.py Tue Nov 21 22:07:12 2017 -0800 @@ -5,11 +5,11 @@ class PieCrustLoader(FileSystemLoader): def __init__(self, searchpath, encoding='utf-8'): super(PieCrustLoader, self).__init__(searchpath, encoding) - self.segment_parts_cache = {} + self.segments_cache = {} def get_source(self, environment, template): - if template.startswith('$part='): - filename, seg_part = 
self.segment_parts_cache[template] + if template.startswith('$seg='): + filename, seg_content = self.segments_cache[template] mtime = os.path.getmtime(filename) @@ -19,6 +19,6 @@ except OSError: return False - return seg_part, filename, uptodate + return seg_content, filename, uptodate return super(PieCrustLoader, self).get_source(environment, template) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/templating/jinjaengine.py --- a/piecrust/templating/jinjaengine.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/templating/jinjaengine.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,6 @@ import os.path import logging -from piecrust.environment import AbortedSourceUseError +from piecrust.sources.base import AbortedSourceUseError from piecrust.templating.base import (TemplateEngine, TemplateNotFoundError, TemplatingError) @@ -9,33 +9,32 @@ class JinjaTemplateEngine(TemplateEngine): - # Name `twig` is for backwards compatibility with PieCrust 1.x. - ENGINE_NAMES = ['jinja', 'jinja2', 'j2', 'twig'] - EXTENSIONS = ['html', 'jinja', 'jinja2', 'j2', 'twig'] + ENGINE_NAMES = ['jinja', 'jinja2', 'j2'] + EXTENSIONS = ['html', 'jinja', 'jinja2', 'j2'] def __init__(self): self.env = None self._jinja_syntax_error = None self._jinja_not_found = None - def renderSegmentPart(self, path, seg_part, data): + def renderSegment(self, path, segment, data): + if not _string_needs_render(segment.content): + return segment.content, False + self._ensureLoaded() - if not _string_needs_render(seg_part.content): - return seg_part.content - - part_path = _make_segment_part_path(path, seg_part.offset) - self.env.loader.segment_parts_cache[part_path] = ( - path, seg_part.content) + seg_path = _make_segment_path(path, segment.offset) + self.env.loader.segments_cache[seg_path] = ( + path, segment.content) try: - tpl = self.env.get_template(part_path) + tpl = self.env.get_template(seg_path) except self._jinja_syntax_error as tse: raise self._getTemplatingError(tse, filename=path) except 
self._jinja_not_found: raise TemplateNotFoundError() try: - return tpl.render(data) + return tpl.render(data), True except self._jinja_syntax_error as tse: raise self._getTemplatingError(tse) except AbortedSourceUseError: @@ -49,20 +48,12 @@ def renderFile(self, paths, data): self._ensureLoaded() - tpl = None - logger.debug("Looking for template: %s" % paths) - rendered_path = None - for p in paths: - try: - tpl = self.env.get_template(p) - rendered_path = p - break - except self._jinja_syntax_error as tse: - raise self._getTemplatingError(tse) - except self._jinja_not_found: - pass - if tpl is None: + try: + tpl = self.env.select_template(paths) + except self._jinja_syntax_error as tse: + raise self._getTemplatingError(tse) + except self._jinja_not_found: raise TemplateNotFoundError() try: @@ -72,9 +63,11 @@ except AbortedSourceUseError: raise except Exception as ex: + if self.app.debug: + raise msg = "Error rendering Jinja markup" - rel_path = os.path.relpath(rendered_path, self.app.root_dir) - raise TemplatingError(msg, rel_path) from ex + name = getattr(tpl, 'name', '') + raise TemplatingError(msg, name) from ex def _getTemplatingError(self, tse, filename=None): filename = tse.filename or filename @@ -84,21 +77,27 @@ raise err from tse def _ensureLoaded(self): - if self.env: + if self.env is not None: return + stats = self.app.env.stats + stats.registerTimer('JinjaTemplateEngine_setup', + raise_if_registered=False) + stats.registerTimer('JinjaTemplateEngine_extensions', + raise_if_registered=False) + with stats.timerScope('JinjaTemplateEngine_setup'): + self._load() + + def _load(self): + get_config = self.app.config.get + # Get the list of extensions to load. - ext_names = self.app.config.get('jinja/extensions', []) + ext_names = get_config('jinja/extensions', []) if not isinstance(ext_names, list): ext_names = [ext_names] # Turn on autoescape by default. 
- autoescape = self.app.config.get('twig/auto_escape') - if autoescape is not None: - logger.warning("The `twig/auto_escape` setting is now called " - "`jinja/auto_escape`.") - else: - autoescape = self.app.config.get('jinja/auto_escape', True) + autoescape = get_config('jinja/auto_escape', True) if autoescape: ext_names.append('autoescape') @@ -146,7 +145,6 @@ return False -def _make_segment_part_path(path, start): - return '$part=%s:%d' % (path, start) +def _make_segment_path(path, start): + return '$seg=%s:%d' % (path, start) - diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/templating/pystacheengine.py --- a/piecrust/templating/pystacheengine.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/templating/pystacheengine.py Tue Nov 21 22:07:12 2017 -0800 @@ -16,10 +16,10 @@ self._not_found_error = None self._pystache_error = None - def renderSegmentPart(self, path, seg_part, data): + def renderSegment(self, path, segment, data): self._ensureLoaded() try: - return self.renderer.render(seg_part.content, data) + return self.renderer.render(segment.content, data), True except self._not_found_error as ex: raise TemplateNotFoundError() from ex except self._pystache_error as ex: diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/themes/base.py --- a/piecrust/themes/base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/themes/base.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,7 +1,7 @@ import sys import os.path import yaml -from piecrust import CONFIG_PATH, THEME_DIR, THEMES_DIR +from piecrust import CONFIG_PATH, THEMES_DIR class Theme(object): @@ -26,6 +26,8 @@ config_path = os.path.join(self.root_dir, CONFIG_PATH) with open(config_path, 'r', encoding='utf8') as fp: config = yaml.load(fp.read()) + if not config: + return None site_config = config.get('site', {}) theme = site_config.get('theme', None) if theme is None: @@ -53,6 +55,6 @@ return theme_dir raise ThemeNotFoundError( - "Can't find theme '%s'. Looked in: %s" % - (theme, ', '.join(dirs))) + "Can't find theme '%s'. 
Looked in: %s" % + (theme, ', '.join(dirs))) diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/uriutil.py --- a/piecrust/uriutil.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/uriutil.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,5 @@ import re import os.path -import string import logging @@ -17,7 +16,8 @@ root = app.config.get('site/root') uri_root = uri[:len(root)] if uri_root != root: - raise Exception("URI '%s' is not a full URI." % uri) + raise Exception("URI '%s' is not a full URI, expected root '%s'." % + (uri, root)) uri = uri[len(root):] return uri_root, uri @@ -25,7 +25,8 @@ def split_sub_uri(app, uri): root = app.config.get('site/root') if not uri.startswith(root): - raise Exception("URI '%s' is not a full URI." % uri) + raise Exception("URI '%s' is not a full URI, expected root '%s'." % + (uri, root)) pretty_urls = app.config.get('site/pretty_urls') trailing_slash = app.config.get('site/trailing_slash') @@ -57,3 +58,8 @@ return uri, page_num + +def uri_to_title(slug): + slug = re.sub(r'[\-_]', ' ', slug) + return slug.title() + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/workerpool.py --- a/piecrust/workerpool.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/workerpool.py Tue Nov 21 22:07:12 2017 -0800 @@ -2,37 +2,62 @@ import os import sys import time -import zlib -import queue import logging -import itertools import threading +import traceback import multiprocessing -from piecrust import fastpickle +from piecrust.environment import ExecutionStats logger = logging.getLogger(__name__) -use_fastqueue = True +use_fastqueue = False +use_fastpickle = False +use_msgpack = False +use_marshall = False +use_json = False class IWorker(object): + """ Interface for a pool worker. 
+ """ def initialize(self): raise NotImplementedError() def process(self, job): raise NotImplementedError() - def getReport(self, pool_reports): + def getStats(self): return None def shutdown(self): pass +def _get_worker_exception_data(wid): + t, v, tb = sys.exc_info() + return { + 'wid': wid, + 'type': str(t), + 'value': '\n'.join(_get_errors(v)), + 'traceback': ''.join(traceback.format_exception(t, v, tb)) + } + + +def _get_errors(ex): + errors = [] + while ex is not None: + msg = str(ex) + errors.append(msg) + ex = ex.__cause__ + return errors + + TASK_JOB = 0 -TASK_BATCH = 1 +TASK_JOB_BATCH = 1 TASK_END = 2 +_TASK_ABORT_WORKER = 10 +_CRITICAL_WORKER_ERROR = 11 def worker_func(params): @@ -52,15 +77,64 @@ def _real_worker_func(params): + try: + _real_worker_func_unsafe(params) + except Exception as ex: + logger.exception(ex) + msg = ("CRITICAL ERROR IN WORKER %d\n%s" % (params.wid, str(ex))) + params.outqueue.put(( + _CRITICAL_WORKER_ERROR, None, False, params.wid, msg)) + + +def _pre_parse_pytest_args(): + # If we are unit-testing, we need to translate our test logging + # arguments into something Chef can understand. + import argparse + parser = argparse.ArgumentParser() + # This is adapted from our `conftest.py`. 
+ parser.add_argument('--log-debug', action='store_true') + parser.add_argument('--log-file') + res, _ = parser.parse_known_args(sys.argv[1:]) + + chef_args = [] + if res.log_debug: + chef_args.append('--debug') + if res.log_file: + chef_args += ['--log', res.log_file] + + root_logger = logging.getLogger() + while len(root_logger.handlers) > 0: + root_logger.removeHandler(root_logger.handlers[0]) + + from piecrust.main import _pre_parse_chef_args + _pre_parse_chef_args(chef_args) + + +def _real_worker_func_unsafe(params): + wid = params.wid + + stats = ExecutionStats() + stats.registerTimer('WorkerInit') + init_start_time = time.perf_counter() + # In a context where `multiprocessing` is using the `spawn` forking model, # the new process doesn't inherit anything, so we lost all our logging # configuration here. Let's set it up again. if (hasattr(multiprocessing, 'get_start_method') and multiprocessing.get_start_method() == 'spawn'): - from piecrust.main import _pre_parse_chef_args - _pre_parse_chef_args(sys.argv[1:]) + if not params.is_unit_testing: + from piecrust.main import _pre_parse_chef_args + _pre_parse_chef_args(sys.argv[1:]) + else: + _pre_parse_pytest_args() + elif params.is_unit_testing: + _pre_parse_pytest_args() - wid = params.wid + from piecrust.main import ColoredFormatter + root_logger = logging.getLogger() + root_logger.handlers[0].setFormatter(ColoredFormatter( + ('[W-%d]' % wid) + '[%(name)s] %(message)s')) + logger.debug("Worker %d initializing..." % wid) # We don't need those. @@ -73,135 +147,109 @@ try: w.initialize() except Exception as ex: - logger.error("Working failed to initialize:") + logger.error("Worker %d failed to initialize." % wid) logger.exception(ex) - params.outqueue.put(None) - return + raise - use_threads = False - if use_threads: - # Create threads to read/write the jobs and results from/to the - # main arbitrator process. 
- local_job_queue = queue.Queue() - reader_thread = threading.Thread( - target=_job_queue_reader, - args=(params.inqueue.get, local_job_queue), - name="JobQueueReaderThread") - reader_thread.start() - - local_result_queue = queue.Queue() - writer_thread = threading.Thread( - target=_job_results_writer, - args=(local_result_queue, params.outqueue.put), - name="JobResultWriterThread") - writer_thread.start() - - get = local_job_queue.get - put = local_result_queue.put_nowait - else: - get = params.inqueue.get - put = params.outqueue.put + stats.stepTimerSince('WorkerInit', init_start_time) # Start pumping! completed = 0 time_in_get = 0 time_in_put = 0 + get = params.inqueue.get + put = params.outqueue.put + while True: get_start_time = time.perf_counter() task = get() time_in_get += (time.perf_counter() - get_start_time) task_type, task_data = task - if task_type == TASK_END: - logger.debug("Worker %d got end task, exiting." % wid) - wprep = { - 'WorkerTaskGet': time_in_get, - 'WorkerResultPut': time_in_put} - try: - rep = (task_type, True, wid, (wid, w.getReport(wprep))) - except Exception as e: - logger.debug("Error getting report: %s" % e) - if params.wrap_exception: - e = multiprocessing.ExceptionWithTraceback( - e, e.__traceback__) - rep = (task_type, False, wid, (wid, e)) - put(rep) - break + + # Job task(s)... just do it. 
+ if task_type == TASK_JOB or task_type == TASK_JOB_BATCH: + + task_data_list = task_data + if task_type == TASK_JOB: + task_data_list = [task_data] + + result_list = [] - if task_type == TASK_JOB: - task_data = (task_data,) + for td in task_data_list: + try: + res = w.process(td) + result_list.append((td, res, True)) + except Exception as e: + logger.debug( + "Error processing job, sending exception to main process:") + logger.debug(traceback.format_exc()) + we = _get_worker_exception_data(wid) + res = (td, we, False) + result_list.append((td, res, False)) - for t in task_data: - try: - res = (TASK_JOB, True, wid, w.process(t)) - except Exception as e: - if params.wrap_exception: - e = multiprocessing.ExceptionWithTraceback( - e, e.__traceback__) - res = (TASK_JOB, False, wid, e) - + res = (task_type, wid, result_list) put_start_time = time.perf_counter() put(res) time_in_put += (time.perf_counter() - put_start_time) - completed += 1 + completed += len(task_data_list) - if use_threads: - logger.debug("Worker %d waiting for reader/writer threads." % wid) - local_result_queue.put_nowait(None) - reader_thread.join() - writer_thread.join() + # End task... gather stats to send back to the main process. + elif task_type == TASK_END: + logger.debug("Worker %d got end task, exiting." % wid) + stats.registerTimer('WorkerTaskGet', time=time_in_get) + stats.registerTimer('WorkerResultPut', time=time_in_put) + try: + stats.mergeStats(w.getStats()) + stats_data = stats.toData() + rep = (task_type, wid, [(task_data, (wid, stats_data), True)]) + except Exception as e: + logger.debug( + "Error getting report, sending exception to main process:") + logger.debug(traceback.format_exc()) + we = _get_worker_exception_data(wid) + rep = (task_type, wid, [(task_data, (wid, we), False)]) + put(rep) + break - w.shutdown() + # Emergy abort. + elif task_type == _TASK_ABORT_WORKER: + logger.debug("Worker %d got abort signal." 
% wid) + break + + else: + raise Exception("Unknown task type: %s" % task_type) + + try: + w.shutdown() + except Exception as e: + logger.error("Worker %s failed to shutdown.") + logger.exception(e) + raise logger.debug("Worker %d completed %d tasks." % (wid, completed)) -def _job_queue_reader(getter, out_queue): - while True: - try: - task = getter() - except (EOFError, OSError): - logger.debug("Worker encountered connection problem.") - break - - out_queue.put_nowait(task) - - if task[0] == TASK_END: - # Done reading jobs from the main process. - logger.debug("Got end task, exiting task queue reader thread.") - break - - -def _job_results_writer(in_queue, putter): - while True: - res = in_queue.get() - if res is not None: - putter(res) - in_queue.task_done() - else: - # Got sentinel. Exit. - in_queue.task_done() - break - logger.debug("Exiting result queue writer thread.") - - -class _WorkerParams(object): +class _WorkerParams: def __init__(self, wid, inqueue, outqueue, worker_class, initargs=(), - wrap_exception=False, is_profiling=False): + is_profiling=False, is_unit_testing=False): self.wid = wid self.inqueue = inqueue self.outqueue = outqueue self.worker_class = worker_class self.initargs = initargs - self.wrap_exception = wrap_exception self.is_profiling = is_profiling + self.is_unit_testing = is_unit_testing -class WorkerPool(object): - def __init__(self, worker_class, initargs=(), +class WorkerPool: + def __init__(self, worker_class, initargs=(), *, + callback=None, error_callback=None, worker_count=None, batch_size=None, - wrap_exception=False): + userdata=None): + self.userdata = userdata + worker_count = worker_count or os.cpu_count() or 1 if use_fastqueue: @@ -212,25 +260,32 @@ else: self._task_queue = multiprocessing.SimpleQueue() self._result_queue = multiprocessing.SimpleQueue() - self._quick_put = self._task_queue._writer.send - self._quick_get = self._result_queue._reader.recv + self._quick_put = self._task_queue.put + self._quick_get = 
self._result_queue.get + self._callback = callback + self._error_callback = error_callback self._batch_size = batch_size - self._callback = None - self._error_callback = None - self._listener = None + self._jobs_left = 0 + self._lock_jobs_left = threading.Lock() + self._lock_workers = threading.Lock() + self._event = threading.Event() + self._error_on_join = None + self._closed = False main_module = sys.modules['__main__'] is_profiling = os.path.basename(main_module.__file__) in [ - 'profile.py', 'cProfile.py'] + 'profile.py', 'cProfile.py'] + is_unit_testing = os.path.basename(main_module.__file__) in [ + 'py.test'] self._pool = [] for i in range(worker_count): worker_params = _WorkerParams( - i, self._task_queue, self._result_queue, - worker_class, initargs, - wrap_exception=wrap_exception, - is_profiling=is_profiling) + i, self._task_queue, self._result_queue, + worker_class, initargs, + is_profiling=is_profiling, + is_unit_testing=is_unit_testing) w = multiprocessing.Process(target=worker_func, args=(worker_params,)) w.name = w.name.replace('Process', 'PoolWorker') @@ -239,77 +294,74 @@ self._pool.append(w) self._result_handler = threading.Thread( - target=WorkerPool._handleResults, - args=(self,)) + target=WorkerPool._handleResults, + args=(self,)) self._result_handler.daemon = True self._result_handler.start() - self._closed = False + def queueJobs(self, jobs): + if self._closed: + if self._error_on_join: + raise self._error_on_join + raise Exception("This worker pool has been closed.") + + jobs = list(jobs) + new_job_count = len(jobs) + if new_job_count > 0: + with self._lock_jobs_left: + self._jobs_left += new_job_count - def setHandler(self, callback=None, error_callback=None): - self._callback = callback - self._error_callback = error_callback + self._event.clear() + bs = self._batch_size + if not bs: + for job in jobs: + self._quick_put((TASK_JOB, job)) + else: + cur_offset = 0 + while cur_offset < new_job_count: + next_batch_idx = min(cur_offset + bs, 
new_job_count) + job_batch = jobs[cur_offset:next_batch_idx] + self._quick_put((TASK_JOB_BATCH, job_batch)) + cur_offset = next_batch_idx + else: + with self._lock_jobs_left: + done = (self._jobs_left == 0) + if done: + self._event.set() - def queueJobs(self, jobs, handler=None, chunk_size=None): + def wait(self, timeout=None): if self._closed: raise Exception("This worker pool has been closed.") - if self._listener is not None: - raise Exception("A previous job queue has not finished yet.") - if any([not p.is_alive() for p in self._pool]): - raise Exception("Some workers have prematurely exited.") - - if handler is not None: - self.setHandler(handler) - - if not hasattr(jobs, '__len__'): - jobs = list(jobs) - job_count = len(jobs) - - res = AsyncResult(self, job_count) - if res._count == 0: - res._event.set() - return res - - self._listener = res - - if chunk_size is None: - chunk_size = self._batch_size - if chunk_size is None: - chunk_size = max(1, job_count // 50) - logger.debug("Using chunk size of %d" % chunk_size) - - if chunk_size is None or chunk_size == 1: - for job in jobs: - self._quick_put((TASK_JOB, job)) - else: - it = iter(jobs) - while True: - batch = tuple([i for i in itertools.islice(it, chunk_size)]) - if not batch: - break - self._quick_put((TASK_BATCH, batch)) - - return res + ret = self._event.wait(timeout) + if self._error_on_join: + raise self._error_on_join + return ret def close(self): - if self._listener is not None: + if self._closed: + raise Exception("This worker pool has been closed.") + if self._jobs_left > 0: raise Exception("A previous job queue has not finished yet.") + if not self._event.is_set(): + raise Exception("A previous job queue hasn't been cleared.") logger.debug("Closing worker pool...") - handler = _ReportHandler(len(self._pool)) + live_workers = list(filter(lambda w: w is not None, self._pool)) + handler = _ReportHandler(len(live_workers)) self._callback = handler._handle - for w in self._pool: + self._error_callback 
= handler._handleError + for w in live_workers: self._quick_put((TASK_END, None)) - for w in self._pool: + for w in live_workers: w.join() logger.debug("Waiting for reports...") if not handler.wait(2): missing = handler.reports.index(None) logger.warning( - "Didn't receive all worker reports before timeout. " - "Missing report from worker %d." % missing) + "Didn't receive all worker reports before timeout. " + "Missing report from worker %d." % missing) logger.debug("Exiting result handler thread...") self._result_queue.put(None) @@ -318,8 +370,31 @@ return handler.reports + def _onResultHandlerCriticalError(self, wid): + logger.error("Result handler received a critical error from " + "worker %d." % wid) + with self._lock_workers: + self._pool[wid] = None + if all(map(lambda w: w is None, self._pool)): + logger.error("All workers have died!") + self._closed = True + self._error_on_join = Exception("All workers have died!") + self._event.set() + return False + + return True + + def _onTaskDone(self): + with self._lock_jobs_left: + left = self._jobs_left - 1 + self._jobs_left = left + + if left == 0: + self._event.set() + @staticmethod def _handleResults(pool): + userdata = pool.userdata while True: try: res = pool._quick_get() @@ -330,79 +405,74 @@ if res is None: logger.debug("Result handler exiting.") - break + return - task_type, success, wid, data = res - try: - if success and pool._callback: - pool._callback(data) - elif not success: - if pool._error_callback: - pool._error_callback(data) + task_type, wid, res_data_list = res + for res_data in res_data_list: + try: + task_data, data, success = res_data + if success: + if pool._callback: + pool._callback(task_data, data, userdata) else: - logger.error("Got error data:") - logger.error(data) - except Exception as ex: - logger.exception(ex) + if task_type == _CRITICAL_WORKER_ERROR: + logger.error(data) + do_continue = pool._onResultHandlerCriticalError(wid) + if not do_continue: + logger.debug("Aborting result 
handling thread.") + return + else: + if pool._error_callback: + pool._error_callback(task_data, data, userdata) + else: + logger.error( + "Worker %d failed to process a job:" % wid) + logger.error(data) + except Exception as ex: + logger.exception(ex) if task_type == TASK_JOB: - pool._listener._onTaskDone() + pool._onTaskDone() -class AsyncResult(object): - def __init__(self, pool, count): - self._pool = pool - self._count = count - self._event = threading.Event() - - def ready(self): - return self._event.is_set() - - def wait(self, timeout=None): - return self._event.wait(timeout) - - def _onTaskDone(self): - self._count -= 1 - if self._count == 0: - self._pool.setHandler(None) - self._pool._listener = None - self._event.set() - - -class _ReportHandler(object): +class _ReportHandler: def __init__(self, worker_count): self.reports = [None] * worker_count self._count = worker_count self._received = 0 + self._lock = threading.Lock() self._event = threading.Event() def wait(self, timeout=None): return self._event.wait(timeout) - def _handle(self, res): + def _handle(self, job, res, _): wid, data = res if wid < 0 or wid > self._count: logger.error("Ignoring report from unknown worker %d." % wid) return - self._received += 1 - self.reports[wid] = data + stats = ExecutionStats() + stats.fromData(data) - if self._received == self._count: - self._event.set() + with self._lock: + self.reports[wid] = stats + self._received += 1 + if self._received == self._count: + self._event.set() - def _handleError(self, res): - wid, data = res - logger.error("Worker %d failed to send its report." % wid) - logger.exception(data) + def _handleError(self, job, res, _): + logger.error("Worker %d failed to send its report." 
% res.wid) + logger.error(res) -class FastQueue(object): +class FastQueue: def __init__(self): self._reader, self._writer = multiprocessing.Pipe(duplex=False) self._rlock = multiprocessing.Lock() self._wlock = multiprocessing.Lock() self._initBuffers() + self._initSerializer() def _initBuffers(self): self._rbuf = io.BytesIO() @@ -410,6 +480,9 @@ self._wbuf = io.BytesIO() self._wbuf.truncate(256) + def _initSerializer(self): + pass + def __getstate__(self): return (self._reader, self._writer, self._rlock, self._wlock) @@ -419,6 +492,7 @@ def get(self): with self._rlock: + self._rbuf.seek(0) try: with self._rbuf.getbuffer() as b: bufsize = self._reader.recv_bytes_into(b) @@ -429,11 +503,11 @@ self._rbuf.write(e.args[0]) self._rbuf.seek(0) - return self._unpickle(self._rbuf, bufsize) + return _unpickle(self, self._rbuf, bufsize) def put(self, obj): self._wbuf.seek(0) - self._pickle(obj, self._wbuf) + _pickle(self, obj, self._wbuf) size = self._wbuf.tell() self._wbuf.seek(0) @@ -441,9 +515,85 @@ with self._wbuf.getbuffer() as b: self._writer.send_bytes(b, 0, size) - def _pickle(self, obj, buf): + +class _BufferWrapper: + def __init__(self, buf, read_size=0): + self._buf = buf + self._read_size = read_size + + def write(self, data): + self._buf.write(data.encode('utf8')) + + def read(self): + return self._buf.read(self._read_size).decode('utf8') + + +if use_fastpickle: + from piecrust import fastpickle + + def _pickle_fast(queue, obj, buf): fastpickle.pickle_intob(obj, buf) - def _unpickle(self, buf, bufsize): + def _unpickle_fast(queue, buf, bufsize): return fastpickle.unpickle_fromb(buf, bufsize) + _pickle = _pickle_fast + _unpickle = _unpickle_fast + +elif use_msgpack: + import msgpack + + def _pickle_msgpack(queue, obj, buf): + buf.write(queue._packer.pack(obj)) + + def _unpickle_msgpack(queue, buf, bufsize): + queue._unpacker.feed(buf.getbuffer()) + for o in queue._unpacker: + return o + # return msgpack.unpack(buf) + + def _init_msgpack(queue): + queue._packer = 
msgpack.Packer() + queue._unpacker = msgpack.Unpacker() + + _pickle = _pickle_msgpack + _unpickle = _unpickle_msgpack + FastQueue._initSerializer = _init_msgpack + +elif use_marshall: + import marshal + + def _pickle_marshal(queue, obj, buf): + marshal.dump(obj, buf) + + def _unpickle_marshal(queue, buf, bufsize): + return marshal.load(buf) + + _pickle = _pickle_marshal + _unpickle = _unpickle_marshal + +elif use_json: + import json + + def _pickle_json(queue, obj, buf): + buf = _BufferWrapper(buf) + json.dump(obj, buf, indent=None, separators=(',', ':')) + + def _unpickle_json(queue, buf, bufsize): + buf = _BufferWrapper(buf, bufsize) + return json.load(buf) + + _pickle = _pickle_json + _unpickle = _unpickle_json + +else: + import pickle + + def _pickle_default(queue, obj, buf): + pickle.dump(obj, buf, pickle.HIGHEST_PROTOCOL) + + def _unpickle_default(queue, buf, bufsize): + return pickle.load(buf) + + _pickle = _pickle_default + _unpickle = _unpickle_default diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/wsgiutil/__init__.py --- a/piecrust/wsgiutil/__init__.py Tue Nov 21 11:00:06 2017 -0800 +++ b/piecrust/wsgiutil/__init__.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,9 +1,41 @@ -from piecrust.serving.server import WsgiServer +import logging +from piecrust.serving.wrappers import get_piecrust_server + + +def _setup_logging(log_file, log_level, max_log_bytes, log_backup_count): + if log_file: + from logging.handlers import RotatingFileHandler + handler = RotatingFileHandler(log_file, maxBytes=max_log_bytes, + backupCount=log_backup_count) + handler.setLevel(log_level) + logging.getLogger().addHandler(handler) -def get_app(root_dir, cache_key='prod', enable_debug_info=False): - app = WsgiServer(root_dir, - cache_key=cache_key, - enable_debug_info=enable_debug_info) +def get_app(root_dir, *, + cache_key='prod', + serve_admin=False, + log_file=None, + log_level=logging.INFO, + log_backup_count=0, + max_log_bytes=4096): + _setup_logging(log_file, log_level, max_log_bytes, 
log_backup_count) + app = get_piecrust_server(root_dir, + serve_site=True, + serve_admin=serve_admin, + cache_key=cache_key) return app + +def get_admin_app(root_dir, *, + cache_key='prod', + log_file=None, + log_level=logging.INFO, + log_backup_count=0, + max_log_bytes=4096): + _setup_logging(log_file, log_level, max_log_bytes, log_backup_count) + app = get_piecrust_server(root_dir, + serve_site=False, + serve_admin=True, + cache_key=cache_key) + return app + diff -r 7a1903ede496 -r 2e5c5d33d62c piecrust/wsgiutil/cwdadminapp.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/piecrust/wsgiutil/cwdadminapp.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,13 @@ +# This is a utility module that can be used with any WSGI-compatible server +# like Werkzeug or Gunicorn. It returns a WSGI app for serving a PieCrust +# administration panel located in the current working directory. +import os +from piecrust.wsgiutil import get_admin_app + + +root_dir = os.getcwd() +app = get_admin_app(root_dir) +# Add this for `mod_wsgi`. 
+application = app + + diff -r 7a1903ede496 -r 2e5c5d33d62c requirements.txt --- a/requirements.txt Tue Nov 21 11:00:06 2017 -0800 +++ b/requirements.txt Tue Nov 21 22:07:12 2017 -0800 @@ -1,19 +1,32 @@ +appdirs==1.4.3 +asn1crypto==0.22.0 cffi==1.5.0 colorama==0.3.3 compressinja==0.0.2 +cryptography==1.8.1 Flask==0.10.1 +Flask-IndieAuth==0.0.3.2 Flask-Login==0.3.2 -Jinja2==2.7.3 +hoedown==0.2.3 +idna==2.5 +itsdangerous==0.24 +Jinja2==2.9.6 Markdown==2.6.2 -MarkupSafe==0.23 +MarkupSafe==1.0 +packaging==16.8 paramiko==2.0.0 +py==1.4.33 +pyasn1==0.2.3 +pycparser==2.17 Pygments==2.0.2 +pyparsing==2.2.0 pystache==0.5.4 python-dateutil==2.4.2 PyYAML==3.11 repoze.lru==0.6 +six==1.10.0 smartypants==1.8.6 strict-rfc3339==0.5 textile==2.2.2 Unidecode==0.4.18 -Werkzeug==0.10.4 +Werkzeug==0.12.2 diff -r 7a1903ede496 -r 2e5c5d33d62c setup.py --- a/setup.py Tue Nov 21 11:00:06 2017 -0800 +++ b/setup.py Tue Nov 21 22:07:12 2017 -0800 @@ -42,7 +42,7 @@ class GenerateVersionCommand(Command): description = 'generates a version file' user_options = [ - ('force=', 'f', 'force a specific version number')] + ('force=', 'f', 'force a specific version number')] def initialize_options(self): self.force = None @@ -146,8 +146,8 @@ version = APP_VERSION except ImportError: print( - "WARNING: Can't get version from version file. " - "Will use version `0.0`.") + "WARNING: Can't get version from version file. 
" + "Will use version `0.0`.") version = '0.0' @@ -156,47 +156,47 @@ setup( - name="PieCrust", - version=version, - description="A powerful static website generator and lightweight CMS.", - long_description=read('README.rst') + '\n\n' + read('CHANGELOG.rst'), - author="Ludovic Chabant", - author_email="ludovic@chabant.com", - license="Apache License 2.0", - url="http://bolt80.com/piecrust", - keywords=' '.join([ - 'python', - 'website', - 'generator', - 'blog', - 'portfolio', - 'gallery', - 'cms' - ]), - packages=find_packages(exclude=['garcon', 'tests']), - include_package_data=True, - zip_safe=False, - install_requires=install_requires, - tests_require=tests_require, - cmdclass={ - 'test': PyTest, - 'version': GenerateVersionCommand - }, - classifiers=[ - 'Development Status :: 4 - Beta', - 'License :: OSI Approved :: Apache Software License', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Natural Language :: English', - 'Operating System :: MacOS :: MacOS X', - 'Operating System :: POSIX :: Linux', - 'Operating System :: Microsoft :: Windows', - 'Programming Language :: Python :: 3 :: Only', - 'Topic :: Internet :: WWW/HTTP :: Site Management' - ], - entry_points={'console_scripts': [ - 'chef = piecrust.main:main' - ]} - ) + name="PieCrust", + version=version, + description="A powerful static website generator and lightweight CMS.", + long_description=read('README.rst') + '\n\n' + read('CHANGELOG.rst'), + author="Ludovic Chabant", + author_email="ludovic@chabant.com", + license="Apache License 2.0", + url="http://bolt80.com/piecrust", + keywords=' '.join([ + 'python', + 'website', + 'generator', + 'blog', + 'portfolio', + 'gallery', + 'cms' + ]), + packages=find_packages(exclude=['garcon', 'tests']), + include_package_data=True, + zip_safe=False, + install_requires=install_requires, + tests_require=tests_require, + cmdclass={ + 'test': PyTest, + 'version': GenerateVersionCommand + }, + 
classifiers=[ + 'Development Status :: 4 - Beta', + 'License :: OSI Approved :: Apache Software License', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'Natural Language :: English', + 'Operating System :: MacOS :: MacOS X', + 'Operating System :: POSIX :: Linux', + 'Operating System :: Microsoft :: Windows', + 'Programming Language :: Python :: 3 :: Only', + 'Topic :: Internet :: WWW/HTTP :: Site Management' + ], + entry_points={'console_scripts': [ + 'chef = piecrust.main:main' + ]} +) diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_archives.yaml --- a/tests/bakes/test_archives.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_archives.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,6 @@ --- in: - pages/_year.html: | + templates/_year.html: | Posts in {{year}} {% for post in pagination.posts -%} {{post.url}} @@ -27,7 +27,7 @@ /2016/01/01/post1.html --- in: - pages/_year.html: | + templates/_year.html: | Posts in {{year}} {% for post in archives -%} {{post.url}} diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_assets.yaml --- a/tests/bakes/test_assets.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_assets.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,15 +1,11 @@ --- in: posts/2010-01-01_post1-assets/blah.png: 'fake image' - posts/2010-01-01_post1.md: 'my image: {{assets.blah}}' - pages/_index.md: 'something' -out: - '2010': - '01': - '01': - post1.html: 'my image: /2010/01/01/post1/blah.png' - post1: - blah.png: 'fake image' + posts/2010-01-01_post1.html: 'my image: {{assets.blah}}' + pages/_index.html: 'something' +outfiles: + 2010/01/01/post1.html: 'my image: /2010/01/01/post1/blah.png' + 2010/01/01/post1/blah.png: 'fake image' index.html: 'something' --- config: @@ -17,14 +13,10 @@ pretty_urls: true in: posts/2010-01-01_post1-assets/blah.png: 'fake image' - posts/2010-01-01_post1.md: 'my image: {{assets.blah}}' - pages/_index.md: 'something' -out: - '2010': - '01': - 
'01': - 'post1': - index.html: 'my image: /2010/01/01/post1/blah.png' - blah.png: 'fake image' + posts/2010-01-01_post1.html: 'my image: {{assets.blah}}' + pages/_index.html: 'something' +outfiles: + 2010/01/01/post1/index.html: 'my image: /2010/01/01/post1/blah.png' + 2010/01/01/post1/blah.png: 'fake image' index.html: 'something' diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_data_provider.yaml --- a/tests/bakes/test_data_provider.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_data_provider.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,10 +1,24 @@ --- in: + pages/_index.html: | + --- + date: '2010/01/05' + --- + The index pages/foo.md: | + --- + date: '2010/01/08' + --- Foo! pages/bar.md: | + --- + date: '2010/01/09' + --- Bar! - pages/allpages.md: | + pages/allpages.html: | + --- + date: '2010/01/10' + --- {% for p in site.pages -%} {{p.url}} {% endfor %} @@ -19,7 +33,7 @@ posts/2016-06-01_one.md: "One!" posts/2016-06-02_two.md: "Two!" posts/2016-06-03_three.md: "Three!" - pages/_index.md: | + pages/_index.html: | {% for p in blog.posts -%} {{p.url}} {% endfor %} @@ -36,7 +50,7 @@ posts/2016-06-01_one.md: "One!" posts/2016-06-02_two.md: "Two!" posts/2016-06-03_three.md: "Three!" - pages/_index.md: | + pages/_index.html: | {{blog.subtitle}} {% for p in blog.posts -%} {{p.url}} @@ -56,7 +70,7 @@ posts/aaa/2016-06-02_two.md: "Two!" posts/xyz/2016-06-01_one-other.md: "One Other!" posts/xyz/2016-06-02_two-other.md: "Two Other!" 
- pages/_index.md: | + pages/_index.html: | {% for p in aaa.posts -%} {{p.url}} {% endfor %} diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_dotfiles.yaml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/bakes/test_dotfiles.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,8 @@ +--- +in: + assets/something.txt: Foo bar + assets/.htaccess: "# Apache config" +outfiles: + something.txt: Foo bar + .htaccess: "# Apache config" + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_linker.yaml --- a/tests/bakes/test_linker.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_linker.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,6 @@ --- in: - pages/foo.md: | + pages/foo.html: | {%for c in family.children%} {{c.title}} {%endfor%} @@ -8,15 +8,15 @@ foo.html: '' --- in: - pages/foo.md: | + pages/foo.html: | {%for c in family.children-%} {{c.title}} {%endfor%} - pages/foo/one.md: | + pages/foo/one.html: | --- title: One --- - pages/foo/two.md: | + pages/foo/two.html: | --- title: Two --- @@ -26,20 +26,23 @@ Two --- in: - pages/foo.md: | + pages/foo.html: | --- title: Foo + date: '2010/01/07' --- {%for c in family.siblings-%} {{c.title}}{%if c.is_self%} SELFIE!{%endif%} {%endfor%} - pages/bar.md: | + pages/bar.html: | --- title: Bar + date: '2010/01/08' --- - pages/other.md: | + pages/other.html: | --- title: Other + date: '2010/01/06' --- outfiles: foo.html: | @@ -48,27 +51,27 @@ Other --- in: - pages/foo.md: "---\ntitle: Foo\n---\n" - pages/foo/one.md: | + pages/foo.html: "---\ntitle: Foo\n---\n" + pages/foo/one.html: | {{family.parent.url}} {{family.parent.title}} outfiles: foo/one.html: /foo.html Foo --- in: - pages/foo.md: "---\ntitle: Foo\n---\n" - pages/foo/bar.md: "---\ntitle: Bar\n---\n" - pages/foo/bar/one.md: | + pages/foo.html: "---\ntitle: Foo\n---\n" + pages/foo/bar.html: "---\ntitle: Bar\n---\n" + pages/foo/bar/one.html: | {{family.parent.url}} {{family.parent.title}} - {{family.parent.parent.url}} {{family.parent.parent.title}} + 
{{family.ancestors[1].url}} {{family.ancestors[1].title}} outfiles: foo/bar/one.html: | /foo/bar.html Bar /foo.html Foo --- in: - pages/foo.md: "---\ntitle: Foo\n---\n" - pages/foo/bar.md: "---\ntitle: Bar\n---\n" - pages/foo/bar/one.md: | + pages/foo.html: "---\ntitle: Foo\n---\n" + pages/foo/bar.html: "---\ntitle: Bar\n---\n" + pages/foo/bar/one.html: | {% for p in family.ancestors -%} {{p.url}} {{p.title}} {% endfor %} diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_multiblog.yaml --- a/tests/bakes/test_multiblog.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_multiblog.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -15,14 +15,14 @@ site: blogs: [one, two] one: - func_prefix: pc + func_prefix: pc1 two: - func_prefix: pc + func_prefix: pc2 in: posts/one/2016-01-01_post1.html: '' posts/two/2016-01-02_post2.html: '' - pages/foo-one.html: "---\nblog: one\n---\nLink: {{pcposturl(2016, 01, 01, 'post1', 'one')}}" - pages/foo-two.html: "---\nblog: two\n---\nLink: {{pcposturl(2016, 01, 02, 'post2', 'two')}}" + pages/foo-one.html: "---\nblog: one\n---\nLink: {{pc1posturl(2016, 01, 01, 'post1', 'one')}}" + pages/foo-two.html: "---\nblog: two\n---\nLink: {{pc2posturl(2016, 01, 02, 'post2', 'two')}}" outfiles: foo-one.html: "Link: /one/2016/01/01/post1.html" foo-two.html: "Link: /two/2016/01/02/post2.html" diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_pagination.yaml --- a/tests/bakes/test_pagination.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_pagination.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -4,15 +4,15 @@ posts_per_page: 3 pagination_suffix: /page%num% in: - posts/2015-03-01_post01.md: "---\ntitle: Post 01\n---\n" - posts/2015-03-02_post02.md: "---\ntitle: Post 02\n---\n" - posts/2015-03-03_post03.md: "---\ntitle: Post 03\n---\n" - posts/2015-03-04_post04.md: "---\ntitle: Post 04\n---\n" - posts/2015-03-05_post05.md: "---\ntitle: Post 05\n---\n" - posts/2015-03-06_post06.md: "---\ntitle: Post 06\n---\n" - posts/2015-03-07_post07.md: 
"---\ntitle: Post 07\n---\n" - pages/_index.md: '' - pages/foo.md: | + posts/2015-03-01_post01.html: "---\ntitle: Post 01\n---\n" + posts/2015-03-02_post02.html: "---\ntitle: Post 02\n---\n" + posts/2015-03-03_post03.html: "---\ntitle: Post 03\n---\n" + posts/2015-03-04_post04.html: "---\ntitle: Post 04\n---\n" + posts/2015-03-05_post05.html: "---\ntitle: Post 05\n---\n" + posts/2015-03-06_post06.html: "---\ntitle: Post 06\n---\n" + posts/2015-03-07_post07.html: "---\ntitle: Post 07\n---\n" + pages/_index.html: '' + pages/foo.html: | {%- for p in pagination.items -%} {{p.url}} {{p.title}} {% endfor -%} @@ -44,33 +44,33 @@ site: posts_per_page: 3 in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [foo] --- - posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [foo] --- - posts/2015-03-03_post03.md: | + posts/2015-03-03_post03.html: | --- title: Post 03 tags: [foo] --- - posts/2015-03-04_post04.md: | + posts/2015-03-04_post04.html: | --- title: Post 04 tags: [foo] --- - posts/2015-03-05_post05.md: | + posts/2015-03-05_post05.html: | --- title: Post 05 tags: [foo] --- - pages/_index.md: '' - pages/_tag.md: | + pages/_index.html: '' + templates/_tag.html: | Posts with {{tag}} {% for p in pagination.items -%} {{p.url}} {{p.title}} diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_relative_pagination.yaml --- a/tests/bakes/test_relative_pagination.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_relative_pagination.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -3,9 +3,9 @@ site: default_post_layout: post in: - posts/2015-03-01_post01.md: "---\ntitle: Post 01\n---\nContent 01" - posts/2015-03-02_post02.md: "---\ntitle: Post 02\n---\nContent 02" - posts/2015-03-03_post03.md: "---\ntitle: Post 03\n---\nContent 03" + posts/2015-03-01_post01.html: "---\ntitle: Post 01\n---\nContent 01" + posts/2015-03-02_post02.html: "---\ntitle: Post 02\n---\nContent 02" + posts/2015-03-03_post03.html: 
"---\ntitle: Post 03\n---\nContent 03" templates/post.html: | BLAH {{content|safe}} {{pagination.prev_item.url}} {{pagination.prev_item.title}} diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_simple.yaml --- a/tests/bakes/test_simple.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_simple.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,13 +1,10 @@ --- in: - posts/2010-01-01_post1.md: 'post one' - pages/about.md: 'URL: {{page.url}}' - pages/_index.md: 'something' -out: - '2010': - '01': - '01': - post1.html: 'post one' + posts/2010-01-01_post1.html: 'post one' + pages/about.html: 'URL: {{page.url}}' + pages/_index.html: 'something' +outfiles: + 2010/01/01/post1.html: 'post one' about.html: 'URL: /about.html' index.html: 'something' --- @@ -15,19 +12,16 @@ site: root: /whatever in: - posts/2010-01-01_post1.md: 'post one' - pages/about.md: 'URL: {{page.url}}' - pages/_index.md: 'something' -out: - '2010': - '01': - '01': - post1.html: 'post one' + posts/2010-01-01_post1.html: 'post one' + pages/about.html: 'URL: {{page.url}}' + pages/_index.html: 'something' +outfiles: + 2010/01/01/post1.html: 'post one' about.html: 'URL: /whatever/about.html' index.html: 'something' --- in: - pages/foo.md: | + pages/foo.html: | This page is {{page.url}} outfiles: foo.html: | @@ -37,7 +31,7 @@ site: author: Amélie Poulain in: - pages/foo.md: 'Site by {{site.author}}' + pages/foo.html: 'Site by {{site.author}}' outfiles: foo.html: 'Site by Amélie Poulain' diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_simple_categories.yaml --- a/tests/bakes/test_simple_categories.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_simple_categories.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -3,45 +3,39 @@ site: category_url: cat/%category% in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 category: foo --- - posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 category: bar --- - posts/2015-03-03_post03.md: | + 
posts/2015-03-03_post03.html: | --- title: Post 03 category: foo --- - pages/_category.md: | + templates/_category.html: | Pages in {{category}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/link.md: 'Link: {{pccaturl("bar")}}' - pages/_index.md: '' -out: + pages/link.html: 'Link: {{pccaturl("bar")}}' + pages/_index.html: '' +outfiles: index.html: '' - '2015': - '03': - '01': - post01.html: '' - '02': - post02.html: '' - '03': - post03.html: '' + '2015/03/01/post01.html': '' + '2015/03/02/post02.html': '' + '2015/03/03/post03.html': '' link.html: 'Link: /cat/bar.html' - cat: - foo.html: | - Pages in foo - Post 03 - Post 01 - bar.html: | - Pages in bar - Post 02 + 'cat/foo.html': | + Pages in foo + Post 03 + Post 01 + 'cat/bar.html': | + Pages in bar + Post 02 diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_simple_tags.yaml --- a/tests/bakes/test_simple_tags.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_simple_tags.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,7 +1,7 @@ --- in: - posts/2015-03-01_post01.md: "---\ntitle: Post 01\n---\nContent 01" - pages/_index.md: | + posts/2015-03-01_post01.html: "---\ntitle: Post 01\n---\nContent 01" + pages/_index.html: | {%for p in pagination.items -%} {{p.content|safe}} {%if p.tags%}{{p.tags}}{%else%}No tags{%endif%} @@ -12,67 +12,61 @@ No tags --- in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [foo] --- - posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [bar, whatever] --- - posts/2015-03-03_post03.md: | + posts/2015-03-03_post03.html: | --- title: Post 03 tags: [foo, bar] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{tag}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' -out: + pages/_index.html: '' +outfiles: index.html: '' - '2015': - '03': - '01': - post01.html: '' - '02': - post02.html: '' - '03': - post03.html: '' - tag: - foo.html: | - Pages in foo - Post 
03 - Post 01 - bar.html: | - Pages in bar - Post 03 - Post 02 - whatever.html: | - Pages in whatever - Post 02 + 2015/03/01/post01.html: '' + 2015/03/02/post02.html: '' + 2015/03/03/post03.html: '' + tag/foo.html: | + Pages in foo + Post 03 + Post 01 + tag/bar.html: | + Pages in bar + Post 03 + Post 02 + tag/whatever.html: | + Pages in whatever + Post 02 --- in: - posts/2016-06-01_post01.md: | + posts/2016-06-01_post01.html: | --- title: Post 01 tags: [foo, bar] --- - posts/2016-06-02_post02.md: | + posts/2016-06-02_post02.html: | --- title: Post 02 tags: [bar, foo] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{tags|join(', ')}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/blah.md: | + pages/blah.html: | Link to: {{pctagurl('foo', 'bar')}} outfiles: blah.html: | @@ -94,17 +88,17 @@ site: slugify_mode: space_to_dash in: - posts/2016-09-01_post01.md: | + posts/2016-09-01_post01.html: | --- title: Post 01 tags: [foo bar] --- - posts/2016-09-02_post2.md: | + posts/2016-09-02_post2.html: | --- title: Post 02 tags: ['foo-bar'] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_sitemap.yaml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/bakes/test_sitemap.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,40 @@ +--- +in: + assets/sitemap.sitemap: | + autogen: [pages, theme_pages] + pages/foo.md: This is a foo +outfiles: + sitemap.xml: | + + + + /foo.html + %test_time_iso8601% + + + / + %test_time_iso8601% + + +--- +in: + assets/sitemap.sitemap: | + autogen: [pages] + pages/foo.md: | + --- + sitemap: + changefreq: monthly + priority: 0.8 + --- + This is a foo +outfiles: + sitemap.xml: | + + + + /foo.html + %test_time_iso8601% + monthly + 0.8 + + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_special_root.yaml --- a/tests/bakes/test_special_root.yaml Tue Nov 21 11:00:06 2017 -0800 +++ 
b/tests/bakes/test_special_root.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -3,8 +3,8 @@ site: root: /~john/public/ in: - pages/about.md: 'URL: {{page.url}}, LINK: {{pcurl("missing")}}' - pages/_index.md: 'URL: {{page.url}}' -out: + pages/about.html: 'URL: {{page.url}}, LINK: {{pcurl("missing")}}' + pages/_index.html: 'URL: {{page.url}}' +outfiles: about.html: 'URL: /%7Ejohn/public/about.html, LINK: /%7Ejohn/public/missing.html' index.html: 'URL: /%7Ejohn/public/' diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_theme.yaml --- a/tests/bakes/test_theme.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_theme.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -5,11 +5,11 @@ default_page_layout: 'none' foo: bar in: - pages/foo.md: "This is: {{foo}}, with no template" + pages/foo.html: "This is: {{foo}}, with no template" theme/theme_config.yml: "name: testtheme" - theme/pages/_index.md: "This is {{site.title}} by {{name}}, with theme template" + theme/pages/_index.html: "This is {{site.title}} by {{name}}, with theme template" theme/templates/default.html: "THEME: {{content}}" -out: +outfiles: index.html: "THEME: This is Some Test by testtheme, with theme template" foo.html: "This is: bar, with no template" --- @@ -17,14 +17,14 @@ site: default_page_layout: 'custom' in: - pages/foo.md: "FOO" - pages/bar.md: "---\nlayout: blah\n---\nBAR" + pages/foo.html: "FOO" + pages/bar.html: "---\nlayout: blah\n---\nBAR" templates/custom.html: "CUSTOM: {{content}}" theme/theme_config.yml: "site: {sources: {theme_pages: {default_layout: blah}}}" - theme/pages/_index.md: "theme index" - theme/pages/about.md: "about" + theme/pages/_index.html: "theme index" + theme/pages/about.html: "about" theme/templates/blah.html: "THEME: {{content}}" -out: +outfiles: index.html: "THEME: theme index" about.html: "THEME: about" foo.html: "CUSTOM: FOO" diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_theme_site.yaml --- a/tests/bakes/test_theme_site.yaml Tue Nov 21 11:00:06 2017 -0800 +++ 
b/tests/bakes/test_theme_site.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -3,7 +3,7 @@ site: title: "Some Test Theme" in: - pages/foo.md: "This is: {{site.title}}" + pages/foo.html: "This is: {{site.title}}" outfiles: foo.html: "This is: Some Test Theme" --- @@ -11,7 +11,7 @@ site: title: "Some Test Theme" in: - pages/foo.md: "This is: {{foo}}" + pages/foo.html: "This is: {{foo}}" configs/theme_preview.yml: "foo: bar" outfiles: foo.html: "This is: bar" diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_unicode.yaml --- a/tests/bakes/test_unicode.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_unicode.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,21 +1,17 @@ --- in: - posts/2010-01-01_déjà-des-accents.md: 'POST URL: {{page.url}}' - pages/présentation.md: 'PAGE URL: {{page.url}}' - pages/_index.md: '' -out: - '2010': - '01': - '01': - déjà-des-accents.html: 'POST URL: /2010/01/01/d%C3%A9j%C3%A0-des-accents.html' + posts/2010-01-01_déjà-des-accents.html: 'POST URL: {{page.url}}' + pages/présentation.html: 'PAGE URL: {{page.url}}' + pages/_index.html: '' +outfiles: + 2010/01/01/déjà-des-accents.html: 'POST URL: /2010/01/01/d%C3%A9j%C3%A0-des-accents.html' présentation.html: 'PAGE URL: /pr%C3%A9sentation.html' index.html: '' --- in: - pages/special/Это тэг.md: 'PAGE URL: {{page.url}}' - pages/_index.md: '' -out: - special: - Это тэг.html: 'PAGE URL: /special/%D0%AD%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html' + pages/special/Это тэг.html: 'PAGE URL: {{page.url}}' + pages/_index.html: '' +outfiles: + special/Это тэг.html: 'PAGE URL: /special/%D0%AD%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html' index.html: '' diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_unicode_tags.yaml --- a/tests/bakes/test_unicode_tags.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_unicode_tags.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,21 +1,21 @@ --- in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [étrange] --- - posts/2015-03-02_post02.md: 
| + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [étrange, sévère] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} with {{tag}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' outfiles: tag/étrange.html: | Pages in /tag/%C3%A9trange.html with étrange @@ -26,17 +26,17 @@ Post 02 --- in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [Это тэг] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' outfiles: tag/Это тэг.html: | Pages in /tag/%D0%AD%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html @@ -46,17 +46,17 @@ site: slugify_mode: lowercase,encode in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [Это тэг] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' outfiles: tag/это тэг.html: | Pages in /tag/%D1%8D%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html @@ -66,22 +66,22 @@ site: slugify_mode: lowercase,transliterate in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [étrange] --- - posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [étrange, sévère] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' outfiles: tag/etrange.html: | Pages in /tag/etrange.html @@ -95,17 +95,17 @@ site: slugify_mode: lowercase,transliterate,space_to_dash in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [Это тэг] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} 
{{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' outfiles: tag/eto-teg.html: | Pages in /tag/eto-teg.html diff -r 7a1903ede496 -r 2e5c5d33d62c tests/bakes/test_variant.yaml --- a/tests/bakes/test_variant.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/bakes/test_variant.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,9 +1,9 @@ --- config: what: not good -config_variant: test +config_variants: [test] in: - pages/_index.md: 'This is {{what}}.' + pages/_index.html: 'This is {{what}}.' configs/test.yml: 'what: awesome' out: index.html: 'This is awesome.' @@ -13,7 +13,7 @@ config_values: what: awesome in: - pages/_index.md: 'This is {{what}}.' + pages/_index.html: 'This is {{what}}.' out: index.html: 'This is awesome.' diff -r 7a1903ede496 -r 2e5c5d33d62c tests/basefs.py --- a/tests/basefs.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/basefs.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,9 +1,14 @@ import os.path import yaml from piecrust.app import PieCrust +from piecrust.sources.base import ContentItem class TestFileSystemBase(object): + _use_chef_debug = False + _pytest_log_handler = None + _leave_mockfs = False + def __init__(self): pass @@ -47,13 +52,13 @@ if config is None: config = {} return self.withFile( - 'kitchen/config.yml', - yaml.dump(config)) + 'kitchen/config.yml', + yaml.dump(config)) def withThemeConfig(self, config): return self.withFile( - 'kitchen/theme_config.yml', - yaml.dump(config)) + 'kitchen/theme_config.yml', + yaml.dump(config)) def withPage(self, url, config=None, contents=None): config = config or {} @@ -74,7 +79,7 @@ url_base, ext = os.path.splitext(page_url) dirname = url_base + '-assets' return self.withAsset( - '%s/%s' % (dirname, name), contents) + '%s/%s' % (dirname, name), contents) def withPages(self, num, url_factory, config_factory=None, contents_factory=None): @@ -95,3 +100,41 @@ self.withPage(url, config, contents) return self + def runChef(self, *args): + root_dir = self.path('/kitchen') + chef_args = ['--root', 
root_dir] + if self._use_chef_debug: + chef_args += ['--debug'] + chef_args += list(args) + + import logging + from piecrust.main import ( + _make_chef_state, _recover_pre_chef_state, + _pre_parse_chef_args, _run_chef) + + # If py.test added a log handler, remove it because Chef will + # add its own logger. + if self._pytest_log_handler: + logging.getLogger().removeHandler( + self._pytest_log_handler) + + state = _make_chef_state() + pre_args = _pre_parse_chef_args(chef_args, state=state) + exit_code = _run_chef(pre_args, chef_args) + _recover_pre_chef_state(state) + + if self._pytest_log_handler: + logging.getLogger().addHandler( + self._pytest_log_handler) + + assert exit_code == 0 + + def getSimplePage(self, rel_path): + app = self.getApp() + source = app.getSource('pages') + content_item = ContentItem( + os.path.join(source.fs_endpoint_path, rel_path), + {'route_params': { + 'slug': os.path.splitext(rel_path)[0]}}) + return app.getPage(source, content_item) + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/conftest.py --- a/tests/conftest.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/conftest.py Tue Nov 21 22:07:12 2017 -0800 @@ -8,7 +8,7 @@ import yaml import colorama from werkzeug.exceptions import HTTPException -from piecrust.app import apply_variant_and_values +from piecrust.app import PieCrustFactory, apply_variants_and_values from piecrust.configuration import merge_dicts from .mockutil import mock_fs, mock_fs_scope @@ -19,20 +19,42 @@ def pytest_addoption(parser): parser.addoption( - '--log-debug', - action='store_true', - help="Sets the PieCrust logger to output debug info to stdout.") + '--log-debug', + action='store_true', + help="Sets the PieCrust logger to output debug info to stdout.") + parser.addoption( + '--log-file', + help="Sets the PieCrust logger to write to a file.") parser.addoption( - '--mock-debug', - action='store_true', - help="Prints contents of the mock file-system.") + '--mock-debug', + action='store_true', + help="Prints contents of the 
mock file-system.") + parser.addoption( + '--leave-mockfs', + action='store_true', + help="Leave the contents of the mock file-system on disk.") def pytest_configure(config): if config.getoption('--log-debug'): + root_logger = logging.getLogger() hdl = logging.StreamHandler(stream=sys.stdout) - logging.getLogger('piecrust').addHandler(hdl) - logging.getLogger('piecrust').setLevel(logging.DEBUG) + root_logger.addHandler(hdl) + root_logger.setLevel(logging.DEBUG) + + from .basefs import TestFileSystemBase + TestFileSystemBase._use_chef_debug = True + TestFileSystemBase._pytest_log_handler = hdl + + log_file = config.getoption('--log-file') + if log_file: + hdl = logging.StreamHandler( + stream=open(log_file, 'w', encoding='utf8')) + logging.getLogger().addHandler(hdl) + + if config.getoption('--leave-mockfs'): + from .basefs import TestFileSystemBase + TestFileSystemBase._leave_mockfs = True def pytest_collect_file(parent, path): @@ -40,8 +62,6 @@ category = os.path.basename(path.dirname) if category == 'bakes': return BakeTestFile(path, parent) - elif category == 'procs': - return PipelineTestFile(path, parent) elif category == 'cli': return ChefTestFile(path, parent) elif category == 'servings': @@ -55,8 +75,8 @@ import traceback ex = excinfo.value return '\n'.join( - traceback.format_exception( - type(ex), ex, ex.__traceback__)) + traceback.format_exception( + type(ex), ex, ex.__traceback__)) return '' @@ -89,11 +109,11 @@ # Suppress any formatting or layout so we can compare # much simpler strings. config = { - 'site': { - 'default_format': 'none', - 'default_page_layout': 'none', - 'default_post_layout': 'none'} - } + 'site': { + 'default_format': 'none', + 'default_page_layout': 'none', + 'default_post_layout': 'none'} + } # Website or theme config. 
test_theme_config = self.spec.get('theme_config') @@ -251,21 +271,24 @@ out_dir = fs.path('kitchen/_counter') app = fs.getApp(theme_site=self.is_theme_site) - variant = self.spec.get('config_variant') values = self.spec.get('config_values') if values is not None: values = list(values.items()) - apply_variant_and_values(app, variant, values) + variants = self.spec.get('config_variants') + apply_variants_and_values(app, variants, values) - baker = Baker(app, out_dir, - applied_config_variant=variant, - applied_config_values=values) - record = baker.bake() + appfactory = PieCrustFactory(app.root_dir, + theme_site=self.is_theme_site, + config_variants=variants, + config_values=values) + baker = Baker(appfactory, app, out_dir) + records = baker.bake() - if not record.success: + if not records.success: errors = [] - for e in record.entries: - errors += e.getAllErrors() + for r in records.records: + for e in r.getEntries(): + errors += e.getAllErrors() raise BakeError(errors) check_expected_outputs(self.spec, fs, ExpectedBakeOutputError) @@ -300,69 +323,7 @@ __item_class__ = BakeTestItem -class PipelineTestItem(YamlTestItemBase): - def runtest(self): - fs = self._prepareMockFs() - - from piecrust.processing.pipeline import ProcessorPipeline - with mock_fs_scope(fs, keep=self.mock_debug): - out_dir = fs.path('kitchen/_counter') - app = fs.getApp(theme_site=self.is_theme_site) - pipeline = ProcessorPipeline(app, out_dir) - - proc_names = self.spec.get('processors') - if proc_names: - pipeline.enabled_processors = proc_names - - record = pipeline.run() - - if not record.success: - errors = [] - for e in record.entries: - errors += e.errors - raise PipelineError(errors) - - check_expected_outputs(self.spec, fs, ExpectedPipelineOutputError) - - def reportinfo(self): - return self.fspath, 0, "pipeline: %s" % self.name - - def repr_failure(self, excinfo): - if isinstance(excinfo.value, ExpectedPipelineOutputError): - return ('\n'.join( - ['Unexpected pipeline output. 
Left is expected output, ' - 'right is actual output'] + - excinfo.value.args[0])) - elif isinstance(excinfo.value, PipelineError): - res = ('\n'.join( - ['Errors occured during processing:'] + - excinfo.value.args[0])) - res += repr_nested_failure(excinfo) - return res - return super(PipelineTestItem, self).repr_failure(excinfo) - - -class PipelineError(Exception): - pass - - -class ExpectedPipelineOutputError(Exception): - pass - - -class PipelineTestFile(YamlTestFileBase): - __item_class__ = PipelineTestItem - - class ServeTestItem(YamlTestItemBase): - class _TestApp(object): - def __init__(self, server): - self.server = server - - def __call__(self, environ, start_response): - response = self.server._try_run_request(environ) - return response(environ, start_response) - def runtest(self): fs = self._prepareMockFs() @@ -374,28 +335,19 @@ expected_headers = self.spec.get('headers') expected_output = self.spec.get('out') expected_contains = self.spec.get('out_contains') - is_admin_test = self.spec.get('admin') is True from werkzeug.test import Client from werkzeug.wrappers import BaseResponse + from piecrust.app import PieCrustFactory + from piecrust.serving.server import PieCrustServer + with mock_fs_scope(fs, keep=self.mock_debug): - if is_admin_test: - from piecrust.admin.web import create_foodtruck_app - s = { - 'FOODTRUCK_CMDLINE_MODE': True, - 'FOODTRUCK_ROOT': fs.path('/kitchen') - } - test_app = create_foodtruck_app(s) - else: - from piecrust.app import PieCrustFactory - from piecrust.serving.server import Server - appfactory = PieCrustFactory( - fs.path('/kitchen'), - theme_site=self.is_theme_site) - server = Server(appfactory) - test_app = self._TestApp(server) + appfactory = PieCrustFactory( + fs.path('/kitchen'), + theme_site=self.is_theme_site) + server = PieCrustServer(appfactory) - client = Client(test_app, BaseResponse) + client = Client(server, BaseResponse) resp = client.get(url) assert expected_status == resp.status_code @@ -417,15 +369,15 @@ 
from piecrust.serving.server import MultipleNotFound if isinstance(excinfo.value, MultipleNotFound): res = '\n'.join( - ["HTTP error 404 returned:", - str(excinfo.value)] + - [str(e) for e in excinfo.value._nfes]) + ["HTTP error 404 returned:", + str(excinfo.value)] + + [str(e) for e in excinfo.value._nfes]) res += repr_nested_failure(excinfo) return res elif isinstance(excinfo.value, HTTPException): res = '\n'.join( - ["HTTP error %s returned:" % excinfo.value.code, - excinfo.value.description]) + ["HTTP error %s returned:" % excinfo.value.code, + excinfo.value.description]) res += repr_nested_failure(excinfo) return res return super(ServeTestItem, self).repr_failure(excinfo) @@ -451,8 +403,8 @@ def createChildContext(self, name): ctx = CompareContext( - path='%s/%s' % (self.path, name), - t=self.time) + path='%s/%s' % (self.path, name), + t=self.time) return ctx @@ -547,9 +499,15 @@ right_time_str = right[i:i + len(test_time_iso8601)] right_time = time.strptime(right_time_str, '%Y-%m-%dT%H:%M:%SZ') left_time = time.gmtime(ctx.time) + # Need to patch the daylist-savings-time flag because it can + # mess up the computation of the time difference. 
+ right_time = (right_time[0], right_time[1], right_time[2], + right_time[3], right_time[4], right_time[5], + right_time[6], right_time[7], + left_time.tm_isdst) difference = time.mktime(left_time) - time.mktime(right_time) print("Got time difference: %d" % difference) - if abs(difference) <= 2: + if abs(difference) <= 1: print("(good enough, moving to end of timestamp)") skip_for = len(test_time_iso8601) - 1 diff -r 7a1903ede496 -r 2e5c5d33d62c tests/mockutil.py --- a/tests/mockutil.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/mockutil.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,8 +1,6 @@ -import os.path import mock -from piecrust.app import PieCrust, PieCrustConfiguration -from piecrust.page import Page -from piecrust.rendering import QualifiedPage, PageRenderingContext, render_page +from piecrust.app import PieCrust +from piecrust.appconfig import PieCrustConfiguration def get_mock_app(config=None): @@ -11,20 +9,21 @@ return app -def get_simple_page(app, rel_path): - source = app.getSource('pages') - metadata = {'slug': os.path.splitext(rel_path)[0]} - return Page(source, metadata, rel_path) +def get_simple_content_item(app, slug): + src = app.getSource('pages') + assert src is not None + + item = src.findContentFromRoute({'slug': slug}) + assert item is not None + return item -def render_simple_page(page, route, route_metadata): - qp = QualifiedPage(page, route, route_metadata) - ctx = PageRenderingContext(qp) - rp = render_page(ctx) - return rp.content +def get_simple_page(app, slug): + src = app.getSource('pages') + item = get_simple_content_item(app, slug) + return app.getPage(src, item) -from .tmpfs import ( - TempDirFileSystem as mock_fs, - TempDirScope as mock_fs_scope) - +from .tmpfs import ( # NOQA + TempDirFileSystem as mock_fs, + TempDirScope as mock_fs_scope) diff -r 7a1903ede496 -r 2e5c5d33d62c tests/procs/test_dotfiles.yaml --- a/tests/procs/test_dotfiles.yaml Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,8 +0,0 @@ 
---- -in: - assets/something.txt: Foo bar - assets/.htaccess: "# Apache config" -outfiles: - something.txt: Foo bar - .htaccess: "# Apache config" - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/procs/test_sitemap.yaml --- a/tests/procs/test_sitemap.yaml Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,40 +0,0 @@ ---- -in: - assets/sitemap.sitemap: | - autogen: [pages, theme_pages] - pages/foo.md: This is a foo -outfiles: - sitemap.xml: | - - - - /foo.html - %test_time_iso8601% - - - / - %test_time_iso8601% - - ---- -in: - assets/sitemap.sitemap: | - autogen: [pages] - pages/foo.md: | - --- - sitemap: - changefreq: monthly - priority: 0.8 - --- - This is a foo -outfiles: - sitemap.xml: | - - - - /foo.html - %test_time_iso8601% - monthly - 0.8 - - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/rdrutil.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/rdrutil.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,8 @@ +from piecrust.rendering import RenderingContext, render_page + + +def render_simple_page(page): + ctx = RenderingContext(page) + rp = render_page(ctx) + return rp.content + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/servings/test_admin.yaml --- a/tests/servings/test_admin.yaml Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ ---- -admin: true -url: / -in: - pages/one.html: '' - posts/2016-01-01_post1.html: '' -out_contains: | - 1 pages - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/servings/test_archives.yaml --- a/tests/servings/test_archives.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/servings/test_archives.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,7 +1,7 @@ --- url: /archives/2016.html in: - pages/_year.html: | + templates/_year.html: | Posts in {{year}} {% for post in pagination.posts -%} {{post.url}} diff -r 7a1903ede496 -r 2e5c5d33d62c tests/servings/test_debug_info.yaml --- a/tests/servings/test_debug_info.yaml Tue Nov 21 11:00:06 2017 -0800 +++ 
b/tests/servings/test_debug_info.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,14 +1,14 @@ --- url: /foo in: - pages/foo.md: | + pages/foo.html: | BLAH {{piecrust.debug_info}} out: BLAH --- url: /foo?!debug in: - pages/foo.md: | + pages/foo.html: | BLAH {{piecrust.debug_info}} out_contains: | @@ -17,12 +17,12 @@ --- url: /foo in: - pages/foo.md: BLAH {{pcurl('bar')}} + pages/foo.html: BLAH {{pcurl('bar')}} out: BLAH /bar.html --- url: /foo?!debug in: - pages/foo.md: BLAH {{pcurl('bar')}} + pages/foo.html: BLAH {{pcurl('bar')}} out: BLAH /bar.html?!debug --- url: /foo @@ -30,7 +30,7 @@ site: pretty_urls: true in: - pages/foo.md: BLAH {{pcurl('bar')}} + pages/foo.html: BLAH {{pcurl('bar')}} out: BLAH /bar --- url: /foo?!debug @@ -38,6 +38,6 @@ site: pretty_urls: true in: - pages/foo.md: BLAH {{pcurl('bar')}} + pages/foo.html: BLAH {{pcurl('bar')}} out: BLAH /bar?!debug diff -r 7a1903ede496 -r 2e5c5d33d62c tests/servings/test_theme.yaml --- a/tests/servings/test_theme.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/servings/test_theme.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -5,7 +5,7 @@ title: "Some Test" in: theme/theme_config.yml: "name: testtheme" - theme/pages/_index.md: "This is {{site.title}} by {{name}}" + theme/pages/_index.html: "This is {{site.title}} by {{name}}" theme/templates/default.html: "THEME: {{content}}" out: "THEME: This is Some Test by testtheme" --- @@ -15,7 +15,7 @@ title: "Some Test" foo: bar in: - pages/foo.md: "This is: {{foo}} by {{name}}" + pages/foo.html: "This is: {{foo}} by {{name}}" theme/theme_config.yml: "name: testtheme" out: "This is: bar by testtheme" diff -r 7a1903ede496 -r 2e5c5d33d62c tests/servings/test_theme_site.yaml --- a/tests/servings/test_theme_site.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/servings/test_theme_site.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -4,7 +4,7 @@ site: title: "Some Test Theme" in: - pages/foo.md: "This is: {{site.title}}" + pages/foo.html: "This is: {{site.title}}" out: "This is: Some Test Theme" --- 
url: /foo.html @@ -12,7 +12,7 @@ site: title: "Some Test Theme" in: - pages/foo.md: "This is: {{foo}}" + pages/foo.html: "This is: {{foo}}" configs/theme_preview.yml: "foo: bar" out: "This is: bar" diff -r 7a1903ede496 -r 2e5c5d33d62c tests/servings/test_unicode.yaml --- a/tests/servings/test_unicode.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/servings/test_unicode.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,16 +1,16 @@ --- url: /pr%C3%A9sentation.html in: - pages/présentation.md: 'PAGE URL: {{page.url}}' + pages/présentation.html: 'PAGE URL: {{page.url}}' out: 'PAGE URL: /pr%C3%A9sentation.html' --- url: /2010/01/01/d%C3%A9j%C3%A0-des-accents.html in: - posts/2010-01-01_déjà-des-accents.md: 'POST URL: {{page.url}}' + posts/2010-01-01_déjà-des-accents.html: 'POST URL: {{page.url}}' out: 'POST URL: /2010/01/01/d%C3%A9j%C3%A0-des-accents.html' --- url: /special/%D0%AD%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html in: - pages/special/Это тэг.md: 'PAGE URL: {{page.url}}' + pages/special/Это тэг.html: 'PAGE URL: {{page.url}}' out: 'PAGE URL: /special/%D0%AD%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html' diff -r 7a1903ede496 -r 2e5c5d33d62c tests/servings/test_unicode_tags.yaml --- a/tests/servings/test_unicode_tags.yaml Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/servings/test_unicode_tags.yaml Tue Nov 21 22:07:12 2017 -0800 @@ -1,22 +1,22 @@ --- url: /tag/%C3%A9trange.html in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [étrange] --- - posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [étrange, sévère] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' out: | Pages in /tag/%C3%A9trange.html Post 02 @@ -24,39 +24,39 @@ --- url: /tag/s%C3%A9v%C3%A8re.html in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [étrange] --- - 
posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [étrange, sévère] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' out: | Pages in /tag/s%C3%A9v%C3%A8re.html Post 02 --- url: /tag/%D0%AD%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [Это тэг] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' out: | Pages in /tag/%D0%AD%D1%82%D0%BE%20%D1%82%D1%8D%D0%B3.html Post 01 @@ -66,22 +66,22 @@ slugify_mode: lowercase,transliterate url: /tag/etrange.html in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [étrange] --- - posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [étrange, sévère] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' out: | Pages in /tag/etrange.html Post 02 @@ -92,22 +92,22 @@ slugify_mode: lowercase,transliterate url: /tag/severe.html in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [étrange] --- - posts/2015-03-02_post02.md: | + posts/2015-03-02_post02.html: | --- title: Post 02 tags: [étrange, sévère] --- - pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' out: | Pages in /tag/severe.html Post 02 @@ -117,17 +117,17 @@ slugify_mode: lowercase,transliterate,space_to_dash url: /tag/eto-teg.html in: - posts/2015-03-01_post01.md: | + posts/2015-03-01_post01.html: | --- title: Post 01 tags: [Это тэг] --- - 
pages/_tag.md: | + templates/_tag.html: | Pages in {{pctagurl(tag)}} {% for p in pagination.posts -%} {{p.title}} {% endfor %} - pages/_index.md: '' + pages/_index.html: '' out: | Pages in /tag/eto-teg.html Post 01 diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_appconfig.py --- a/tests/test_appconfig.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_appconfig.py Tue Nov 21 22:07:12 2017 -0800 @@ -7,7 +7,9 @@ values = {} config = PieCrustConfiguration(values=values) assert config.get('site/root') == '/' - assert len(config.get('site/sources')) == 3 # pages, posts, theme_pages + assert len(config.get('site/sources').keys()) == \ + len(['theme_assets', 'assets', 'theme_pages', 'pages', 'posts', + 'tags', 'categories', 'archives']) def test_config_site_override_title(): @@ -29,11 +31,11 @@ assert app.config.get('site/default_post_layout') == 'bar' assert app.config.get('site/sources/pages/default_layout') == 'foo' assert app.config.get('site/sources/pages/items_per_page') == 5 + assert app.config.get('site/sources/posts/default_layout') == 'bar' + assert app.config.get('site/sources/posts/items_per_page') == 2 assert app.config.get( 'site/sources/theme_pages/default_layout') == 'default' assert app.config.get('site/sources/theme_pages/items_per_page') == 5 - assert app.config.get('site/sources/posts/default_layout') == 'bar' - assert app.config.get('site/sources/posts/items_per_page') == 2 def test_config_site_add_source(): @@ -53,13 +55,16 @@ 'notes', 'posts', 'posts_archives', 'posts_tags', 'posts_categories', 'pages', 'theme_pages']) assert set(app.config.get('site/sources').keys()) == set([ - 'theme_pages', 'pages', 'posts', 'notes']) + 'theme_pages', 'theme_assets', 'pages', 'posts', 'assets', + 'posts_tags', 'posts_categories', 'posts_archives', + 'notes']) def test_config_site_add_source_in_both_site_and_theme(): theme_config = {'site': { 'sources': {'theme_notes': {}}, - 'routes': [{'url': '/theme_notes/%path:slug%', 'source': 'theme_notes'}] + 'routes': 
[{'url': '/theme_notes/%path:slug%', + 'source': 'theme_notes'}] }} config = {'site': { 'sources': {'notes': {}}, @@ -81,7 +86,9 @@ 'posts_categories', 'pages', 'theme_notes', 'theme_pages']) assert set(app.config.get('site/sources').keys()) == set([ - 'theme_pages', 'theme_notes', 'pages', 'posts', 'notes']) + 'theme_pages', 'theme_assets', 'theme_notes', + 'pages', 'posts', 'assets', 'posts_tags', 'posts_categories', + 'posts_archives', 'notes']) def test_multiple_blogs(): @@ -99,7 +106,10 @@ 'bbb', 'bbb_archives', 'bbb_tags', 'bbb_categories', 'pages', 'theme_pages']) assert set(app.config.get('site/sources').keys()) == set([ - 'aaa', 'bbb', 'pages', 'theme_pages']) + 'aaa', 'aaa_tags', 'aaa_categories', 'aaa_archives', + 'bbb', 'bbb_tags', 'bbb_categories', 'bbb_archives', + 'pages', 'assets', + 'theme_pages', 'theme_assets']) def test_custom_list_setting(): diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_baking_baker.py --- a/tests/test_baking_baker.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_baking_baker.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,108 +1,66 @@ import time -import os.path -import urllib.parse -import pytest -from piecrust.baking.baker import Baker -from piecrust.baking.single import PageBaker -from piecrust.baking.records import BakeRecord from .mockutil import get_mock_app, mock_fs, mock_fs_scope -@pytest.mark.parametrize('uri, pretty, expected', [ - # Pretty URLs - ('', True, 'index.html'), - ('2', True, '2/index.html'), - ('foo', True, 'foo/index.html'), - ('foo/2', True, 'foo/2/index.html'), - ('foo/bar', True, 'foo/bar/index.html'), - ('foo/bar/2', True, 'foo/bar/2/index.html'), - ('foo.ext', True, 'foo.ext/index.html'), - ('foo.ext/2', True, 'foo.ext/2/index.html'), - ('foo/bar.ext', True, 'foo/bar.ext/index.html'), - ('foo/bar.ext/2', True, 'foo/bar.ext/2/index.html'), - ('foo.bar.ext', True, 'foo.bar.ext/index.html'), - ('foo.bar.ext/2', True, 'foo.bar.ext/2/index.html'), - # Ugly URLs - ('', False, 'index.html'), - ('2.html', 
False, '2.html'), - ('foo.html', False, 'foo.html'), - ('foo/2.html', False, 'foo/2.html'), - ('foo/bar.html', False, 'foo/bar.html'), - ('foo/bar/2.html', False, 'foo/bar/2.html'), - ('foo.ext', False, 'foo.ext'), - ('foo/2.ext', False, 'foo/2.ext'), - ('foo/bar.ext', False, 'foo/bar.ext'), - ('foo/bar/2.ext', False, 'foo/bar/2.ext'), - ('foo.bar.ext', False, 'foo.bar.ext'), - ('foo.bar/2.ext', False, 'foo.bar/2.ext') - ]) -def test_get_output_path(uri, pretty, expected): - app = get_mock_app() - if pretty: - app.config.set('site/pretty_urls', True) - assert app.config.get('site/pretty_urls') == pretty +def test_bake_and_add_post(): + fs = (mock_fs() + .withConfig() + .withPage('pages/_index.html', {'layout': 'none', 'format': 'none'}, + "{% for p in pagination.posts -%}\n" + "{{p.title}}\n" + "{% endfor %}") + .withPage('posts/2017-01-01_first.html', {'title': "First"}, + "something")) + with mock_fs_scope(fs): + fs.runChef('bake') + structure = fs.getStructure('kitchen/_counter') + assert structure['index.html'] == 'First\n' - for site_root in ['/', '/whatever/', '/~johndoe/']: - app.config.set('site/root', urllib.parse.quote(site_root)) - baker = PageBaker(app, '/destination') - try: - path = baker.getOutputPath(urllib.parse.quote(site_root) + uri, - pretty) - expected = os.path.normpath( - os.path.join('/destination', expected)) - assert expected == path - finally: - baker.shutdown() + time.sleep(1) + fs.withPage('posts/2017-01-02_second.html', {'title': "Second"}, + "something else") + fs.runChef('bake') + structure = fs.getStructure('kitchen/_counter') + assert structure['index.html'] == 'Second\nFirst\n' -def test_removed(): +def test_bake_four_times(): fs = (mock_fs() - .withConfig() - .withPage('pages/foo.md', {'layout': 'none', 'format': 'none'}, 'a foo page') - .withPage('pages/_index.md', {'layout': 'none', 'format': 'none'}, "something")) + .withConfig({'site': { + 'default_format': 'none', + 'default_page_layout': 'none', + 'default_post_layout': 
'none', + }}) + .withPage('pages/_index.html', {'layout': 'none', 'format': 'none'}, + "{% for p in pagination.posts -%}\n" + "{{p.title}}\n" + "{% endfor %}") + .withPage('posts/2017-01-01_first.html', {'title': "First"}, + "something 1") + .withPage('posts/2017-01-02_second.html', {'title': "Second"}, + "something 2")) with mock_fs_scope(fs): - out_dir = fs.path('kitchen/_counter') - app = fs.getApp() - app.config.set('baker/workers', 1) - baker = Baker(app, out_dir) - baker.bake() + fs.runChef('bake') structure = fs.getStructure('kitchen/_counter') - assert structure == { - 'foo.html': 'a foo page', - 'index.html': 'something'} - - os.remove(fs.path('kitchen/pages/foo.md')) - app = fs.getApp() - baker = Baker(app, out_dir) - baker.bake() - structure = fs.getStructure('kitchen/_counter') - assert structure == { - 'index.html': 'something'} - + assert structure['index.html'] == 'Second\nFirst\n' + assert structure['2017']['01']['01']['first.html'] == 'something 1' + assert structure['2017']['01']['02']['second.html'] == 'something 2' -def test_record_version_change(): - fs = (mock_fs() - .withConfig() - .withPage('pages/foo.md', {'layout': 'none', 'format': 'none'}, 'a foo page')) - with mock_fs_scope(fs): - out_dir = fs.path('kitchen/_counter') - app = fs.getApp() - baker = Baker(app, out_dir) - baker.bake() - mtime = os.path.getmtime(fs.path('kitchen/_counter/foo.html')) - time.sleep(1) + fs.runChef('bake') + structure = fs.getStructure('kitchen/_counter') + assert structure['index.html'] == 'Second\nFirst\n' + assert structure['2017']['01']['01']['first.html'] == 'something 1' + assert structure['2017']['01']['02']['second.html'] == 'something 2' - app = fs.getApp() - baker = Baker(app, out_dir) - baker.bake() - assert mtime == os.path.getmtime(fs.path('kitchen/_counter/foo.html')) + fs.runChef('bake') + structure = fs.getStructure('kitchen/_counter') + assert structure['index.html'] == 'Second\nFirst\n' + assert structure['2017']['01']['01']['first.html'] == 
'something 1' + assert structure['2017']['01']['02']['second.html'] == 'something 2' - BakeRecord.RECORD_VERSION += 1 - try: - app = fs.getApp() - baker = Baker(app, out_dir) - baker.bake() - assert mtime < os.path.getmtime(fs.path('kitchen/_counter/foo.html')) - finally: - BakeRecord.RECORD_VERSION -= 1 + fs.runChef('bake') + structure = fs.getStructure('kitchen/_counter') + assert structure['index.html'] == 'Second\nFirst\n' + assert structure['2017']['01']['01']['first.html'] == 'something 1' + assert structure['2017']['01']['02']['second.html'] == 'something 2' diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_configuration.py --- a/tests/test_configuration.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_configuration.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,16 +1,16 @@ import copy -import datetime import yaml import pytest from collections import OrderedDict -from piecrust.configuration import (Configuration, ConfigurationLoader, - merge_dicts) +from piecrust.configuration import ( + Configuration, ConfigurationLoader, merge_dicts, + MERGE_APPEND_LISTS, MERGE_PREPEND_LISTS, MERGE_OVERWRITE_VALUES) @pytest.mark.parametrize('values, expected', [ - (None, {}), - ({'foo': 'bar'}, {'foo': 'bar'}) - ]) + (None, {}), + ({'foo': 'bar'}, {'foo': 'bar'}) +]) def test_config_init(values, expected): config = Configuration(values) assert config.getAll() == expected @@ -33,12 +33,12 @@ def test_config_get_and_set_nested(): config = Configuration({ - 'foo': [4, 2], - 'bar': { - 'child1': 'one', - 'child2': 'two' - } - }) + 'foo': [4, 2], + 'bar': { + 'child1': 'one', + 'child2': 'two' + } + }) assert config.get('foo') == [4, 2] assert config.get('bar/child1') == 'one' assert config.get('bar/child2') == 'two' @@ -81,16 +81,18 @@ @pytest.mark.parametrize('local, incoming, expected', [ - ({}, {}, {}), - ({'foo': 'bar'}, {}, {'foo': 'bar'}), - ({}, {'foo': 'bar'}, {'foo': 'bar'}), - ({'foo': 'bar'}, {'foo': 'other'}, {'foo': 'other'}), - ({'foo': [1, 2]}, {'foo': [3]}, {'foo': 
[3, 1, 2]}), - ({'foo': [1, 2]}, {'foo': 'bar'}, {'foo': 'bar'}), - ({'foo': {'bar': 1, 'baz': 2}}, {'foo': 'bar'}, {'foo': 'bar'}), - ({'foo': {'bar': 1, 'baz': 2}}, {'foo': {'other': 3}}, {'foo': {'bar': 1, 'baz': 2, 'other': 3}}), - ({'foo': {'bar': 1, 'baz': 2}}, {'foo': {'baz': 10}}, {'foo': {'bar': 1, 'baz': 10}}) - ]) + ({}, {}, {}), + ({'foo': 'bar'}, {}, {'foo': 'bar'}), + ({}, {'foo': 'bar'}, {'foo': 'bar'}), + ({'foo': 'bar'}, {'foo': 'other'}, {'foo': 'other'}), + ({'foo': [1, 2]}, {'foo': [3]}, {'foo': [3, 1, 2]}), + ({'foo': [1, 2]}, {'foo': 'bar'}, {'foo': 'bar'}), + ({'foo': {'bar': 1, 'baz': 2}}, {'foo': 'bar'}, {'foo': 'bar'}), + ({'foo': {'bar': 1, 'baz': 2}}, {'foo': {'other': 3}}, + {'foo': {'bar': 1, 'baz': 2, 'other': 3}}), + ({'foo': {'bar': 1, 'baz': 2}}, {'foo': {'baz': 10}}, + {'foo': {'bar': 1, 'baz': 10}}) +]) def test_merge_dicts(local, incoming, expected): local2 = copy.deepcopy(local) merge_dicts(local2, incoming) @@ -99,32 +101,50 @@ def test_config_merge(): config = Configuration({ - 'foo': [4, 2], - 'bar': { - 'child1': 'one', - 'child2': 'two' - } - }) + 'foo': [4, 2], + 'bar': { + 'child1': 'one', + 'child2': 'two' + } + }) other = Configuration({ - 'baz': True, - 'blah': 'blah blah', - 'bar': { - 'child1': 'other one', - 'child10': 'ten' - } - }) + 'baz': True, + 'blah': 'blah blah', + 'bar': { + 'child1': 'other one', + 'child10': 'ten' + } + }) config.merge(other) expected = { - 'foo': [4, 2], - 'baz': True, - 'blah': 'blah blah', - 'bar': { - 'child1': 'other one', - 'child2': 'two', - 'child10': 'ten' - } - } + 'foo': [4, 2], + 'baz': True, + 'blah': 'blah blah', + 'bar': { + 'child1': 'other one', + 'child2': 'two', + 'child10': 'ten' + } + } + assert config.getAll() == expected + + +@pytest.mark.parametrize('mode, expected', [ + (MERGE_APPEND_LISTS, + {'foo': [4, 2, 1, 0], 'bar': 'something'}), + (MERGE_PREPEND_LISTS, + {'foo': [1, 0, 4, 2], 'bar': 'something'}), + (MERGE_OVERWRITE_VALUES, + {'foo': [4, 2], 'bar': 'other 
thing'}) +]) +def test_config_merge_with_mode(mode, expected): + config = Configuration({ + 'foo': [4, 2], + 'bar': 'something' + }) + other = {'foo': [1, 0], 'bar': 'other thing'} + config.merge(other, mode=mode) assert config.getAll() == expected diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_data_assetor.py --- a/tests/test_data_assetor.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_data_assetor.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,95 +1,77 @@ import pytest -from mock import MagicMock -from piecrust.data.assetor import ( - Assetor, UnsupportedAssetsError, build_base_url) -from .mockutil import mock_fs, mock_fs_scope +from piecrust.data.assetor import Assetor, UnsupportedAssetsError +from .mockutil import mock_fs, mock_fs_scope, get_simple_page @pytest.mark.parametrize('fs_fac, site_root, expected', [ - (lambda: mock_fs().withPage('pages/foo/bar'), '/', {}), - (lambda: mock_fs() - .withPage('pages/foo/bar') - .withPageAsset('pages/foo/bar', 'one.txt', 'one'), - '/', - {'one': 'one'}), - (lambda: mock_fs() - .withPage('pages/foo/bar') - .withPageAsset('pages/foo/bar', 'one.txt', 'one') - .withPageAsset('pages/foo/bar', 'two.txt', 'two'), - '/', - {'one': 'one', 'two': 'two'}), + (lambda: mock_fs().withPage('pages/foo/bar'), '/', {}), + (lambda: mock_fs() + .withPage('pages/foo/bar') + .withPageAsset('pages/foo/bar', 'one.txt', 'one'), + '/', + {'one': 'one'}), + (lambda: mock_fs() + .withPage('pages/foo/bar') + .withPageAsset('pages/foo/bar', 'one.txt', 'one') + .withPageAsset('pages/foo/bar', 'two.txt', 'two'), + '/', + {'one': 'one', 'two': 'two'}), - (lambda: mock_fs().withPage('pages/foo/bar'), '/whatever', {}), - (lambda: mock_fs() - .withPage('pages/foo/bar') - .withPageAsset('pages/foo/bar', 'one.txt', 'one'), - '/whatever', - {'one': 'one'}), - (lambda: mock_fs() - .withPage('pages/foo/bar') - .withPageAsset('pages/foo/bar', 'one.txt', 'one') - .withPageAsset('pages/foo/bar', 'two.txt', 'two'), - '/whatever', - {'one': 'one', 'two': 'two'}) - ]) + 
(lambda: mock_fs().withPage('pages/foo/bar'), '/whatever', {}), + (lambda: mock_fs() + .withPage('pages/foo/bar') + .withPageAsset('pages/foo/bar', 'one.txt', 'one'), + '/whatever', + {'one': 'one'}), + (lambda: mock_fs() + .withPage('pages/foo/bar') + .withPageAsset('pages/foo/bar', 'one.txt', 'one') + .withPageAsset('pages/foo/bar', 'two.txt', 'two'), + '/whatever', + {'one': 'one', 'two': 'two'}) +]) def test_assets(fs_fac, site_root, expected): fs = fs_fac() fs.withConfig({'site': {'root': site_root}}) with mock_fs_scope(fs): - page = MagicMock() - page.app = fs.getApp(cache=False) - page.app.env.base_asset_url_format = '%uri%' - page.path = fs.path('/kitchen/pages/foo/bar.md') - assetor = Assetor(page, site_root.rstrip('/') + '/foo/bar') + app = fs.getApp() + app.config.set('site/asset_url_format', '%page_uri%/%filename%') + page = get_simple_page(app, 'foo/bar') + + assetor = Assetor(page) for en in expected.keys(): + assert en in assetor assert hasattr(assetor, en) path = site_root.rstrip('/') + '/foo/bar/%s.txt' % en + assert assetor[en] == path assert getattr(assetor, en) == path - assert assetor[en] == path def test_missing_asset(): with pytest.raises(KeyError): fs = (mock_fs() - .withConfig() - .withPage('pages/foo/bar')) + .withConfig() + .withPage('pages/foo/bar')) with mock_fs_scope(fs): - page = MagicMock() - page.app = fs.getApp(cache=False) - page.path = fs.path('/kitchen/pages/foo/bar.md') - assetor = Assetor(page, '/foo/bar') + app = fs.getApp() + app.config.set('site/asset_url_format', '%page_uri%/%filename%') + page = get_simple_page(app, 'foo/bar') + + assetor = Assetor(page) assetor['this_doesnt_exist'] def test_multiple_assets_with_same_name(): with pytest.raises(UnsupportedAssetsError): fs = (mock_fs() - .withConfig() - .withPage('pages/foo/bar') - .withPageAsset('pages/foo/bar', 'one.txt', 'one text') - .withPageAsset('pages/foo/bar', 'one.jpg', 'one picture')) + .withConfig() + .withPage('pages/foo/bar') + .withPageAsset('pages/foo/bar', 
'one.txt', 'one text') + .withPageAsset('pages/foo/bar', 'one.jpg', 'one picture')) with mock_fs_scope(fs): - page = MagicMock() - page.app = fs.getApp(cache=False) - page.path = fs.path('/kitchen/pages/foo/bar.md') - assetor = Assetor(page, '/foo/bar') - assetor['one'] - + app = fs.getApp() + app.config.set('site/asset_url_format', '%page_uri%/%filename%') + page = get_simple_page(app, 'foo/bar') -@pytest.mark.parametrize('url_format, pretty_urls, uri, expected', [ - ('%uri%', True, '/foo', '/foo/'), - ('%uri%', True, '/foo.ext', '/foo.ext/'), - ('%uri%', False, '/foo.html', '/foo/'), - ('%uri%', False, '/foo.ext', '/foo/'), - ]) -def test_build_base_url(url_format, pretty_urls, uri, expected): - app = MagicMock() - app.env = MagicMock() - app.env.base_asset_url_format = url_format - app.config = { - 'site/root': '/', - 'site/pretty_urls': pretty_urls} - assets_path = 'foo/bar-assets' - actual = build_base_url(app, uri, assets_path) - assert actual == expected - + assetor = Assetor(page) + assetor['one'] diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_data_iterators.py --- a/tests/test_data_iterators.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,89 +0,0 @@ -import mock -from piecrust.data.iterators import PageIterator -from piecrust.page import Page, PageConfiguration - - -def test_skip(): - it = PageIterator(range(12)) - it.skip(5) - assert it.total_count == 12 - assert len(it) == 7 - assert list(it) == list(range(5, 12)) - - -def test_limit(): - it = PageIterator(range(12)) - it.limit(4) - assert it.total_count == 12 - assert len(it) == 4 - assert list(it) == list(range(4)) - - -def test_slice(): - it = PageIterator(range(12)) - it.slice(3, 4) - assert it.total_count == 12 - assert len(it) == 4 - assert list(it) == list(range(3, 7)) - - -def test_natural_sort(): - it = PageIterator([4, 3, 1, 2, 0]) - it.sort() - assert it.total_count == 5 - assert len(it) == 5 - assert list(it) == list(range(5)) - - -def 
test_natural_sort_reversed(): - it = PageIterator([4, 3, 1, 2, 0]) - it.sort(reverse=True) - assert it.total_count == 5 - assert len(it) == 5 - assert list(it) == list(reversed(range(5))) - - -class TestItem(object): - def __init__(self, value): - self.name = str(value) - self.foo = value - - def __eq__(self, other): - return other.name == self.name - - -def test_setting_sort(): - it = PageIterator([TestItem(v) for v in [4, 3, 1, 2, 0]]) - it.sort('foo') - assert it.total_count == 5 - assert len(it) == 5 - assert list(it) == [TestItem(v) for v in range(5)] - - -def test_setting_sort_reversed(): - it = PageIterator([TestItem(v) for v in [4, 3, 1, 2, 0]]) - it.sort('foo', reverse=True) - assert it.total_count == 5 - assert len(it) == 5 - assert list(it) == [TestItem(v) for v in reversed(range(5))] - - -def test_filter(): - page = mock.MagicMock(spec=Page) - page.config = PageConfiguration() - page.config.set('threes', {'is_foo': 3}) - it = PageIterator([TestItem(v) for v in [3, 2, 3, 1, 4, 3]], - current_page=page) - it.filter('threes') - assert it.total_count == 3 - assert len(it) == 3 - assert list(it) == [TestItem(3), TestItem(3), TestItem(3)] - - -def test_magic_filter(): - it = PageIterator([TestItem(v) for v in [3, 2, 3, 1, 4, 3]]) - it.is_foo(3) - assert it.total_count == 3 - assert len(it) == 3 - assert list(it) == [TestItem(3), TestItem(3), TestItem(3)] - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_data_linker.py --- a/tests/test_data_linker.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_data_linker.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,101 +1,73 @@ -import os.path import pytest from piecrust.data.linker import Linker -from .mockutil import mock_fs, mock_fs_scope +from .mockutil import mock_fs, mock_fs_scope, get_simple_content_item @pytest.mark.parametrize( 'fs_fac, page_path, expected', [ - (lambda: mock_fs().withPage('pages/foo'), 'foo.md', - # is_dir, name, is_self, data - [(False, 'foo', True, '/foo')]), + (lambda: 
mock_fs().withPage('pages/foo'), 'foo', + ['/foo']), ((lambda: mock_fs() - .withPage('pages/foo') - .withPage('pages/bar')), - 'foo.md', - [(False, 'bar', False, '/bar'), (False, 'foo', True, '/foo')]), + .withPage('pages/foo') + .withPage('pages/bar')), + 'foo', + ['/bar', '/foo']), ((lambda: mock_fs() - .withPage('pages/baz') - .withPage('pages/something') - .withPage('pages/something/else') - .withPage('pages/foo') - .withPage('pages/bar')), - 'foo.md', - [(False, 'bar', False, '/bar'), - (False, 'baz', False, '/baz'), - (False, 'foo', True, '/foo'), - (True, 'something', False, '/something')]), + .withPage('pages/baz') + .withPage('pages/something') + .withPage('pages/something/else') + .withPage('pages/foo') + .withPage('pages/bar')), + 'foo', + ['/bar', '/baz', '/foo', '/something']), ((lambda: mock_fs() - .withPage('pages/something/else') - .withPage('pages/foo') - .withPage('pages/something/good') - .withPage('pages/bar')), - 'something/else.md', - [(False, 'else', True, '/something/else'), - (False, 'good', False, '/something/good')]) + .withPage('pages/something/else') + .withPage('pages/foo') + .withPage('pages/something/good') + .withPage('pages/bar')), + 'something/else', + ['/something/else', '/something/good']) ]) -def test_linker_iteration(fs_fac, page_path, expected): +def test_linker_siblings(fs_fac, page_path, expected): fs = fs_fac() fs.withConfig() with mock_fs_scope(fs): app = fs.getApp() app.config.set('site/pretty_urls', True) src = app.getSource('pages') - linker = Linker(src, os.path.dirname(page_path), - root_page_path=page_path) - actual = list(iter(linker)) - - assert len(actual) == len(expected) - for (a, e) in zip(actual, expected): - is_dir, name, is_self, url = e - assert a.is_dir == is_dir - assert a.name == name - assert a.is_self == is_self - assert a.url == url + item = get_simple_content_item(app, page_path) + linker = Linker(src, item) + actual = list(linker.siblings) + assert sorted(map(lambda i: i.url, actual)) == 
sorted(expected) @pytest.mark.parametrize( - 'fs_fac, page_path, expected', - [ - (lambda: mock_fs().withPage('pages/foo'), 'foo.md', - [('/foo', True)]), - ((lambda: mock_fs() - .withPage('pages/foo') - .withPage('pages/bar')), - 'foo.md', - [('/bar', False), ('/foo', True)]), - ((lambda: mock_fs() - .withPage('pages/baz') - .withPage('pages/something/else') - .withPage('pages/foo') - .withPage('pages/bar')), - 'foo.md', - [('/bar', False), ('/baz', False), - ('/foo', True), ('/something/else', False)]), - ((lambda: mock_fs() - .withPage('pages/something/else') - .withPage('pages/foo') - .withPage('pages/something/good') - .withPage('pages/bar')), - 'something/else.md', - [('/something/else', True), - ('/something/good', False)]) - ]) -def test_recursive_linker_iteration(fs_fac, page_path, expected): + 'fs_fac, page_path, expected', + [ + (lambda: mock_fs().withPage('pages/foo'), 'foo.md', + []), + ((lambda: mock_fs() + .withPage('pages/foo') + .withPage('pages/bar')), + 'foo', + []), + ((lambda: mock_fs() + .withPage('pages/baz') + .withPage('pages/foo') + .withPage('pages/foo/more') + .withPage('pages/foo/even_more')), + 'foo', + ['/foo/more', '/foo/even_more']) + ]) +def test_linker_children(fs_fac, page_path, expected): fs = fs_fac() fs.withConfig() with mock_fs_scope(fs): app = fs.getApp() app.config.set('site/pretty_urls', True) src = app.getSource('pages') - linker = Linker(src, os.path.dirname(page_path), - root_page_path=page_path) - actual = list(iter(linker.allpages)) - - assert len(actual) == len(expected) - for i, (a, e) in enumerate(zip(actual, expected)): - assert a.is_dir is False - assert a.url == e[0] - assert a.is_self == e[1] - + item = get_simple_content_item(app, page_path) + linker = Linker(src, item) + actual = list(linker.children) + assert sorted(map(lambda i: i.url, actual)) == sorted(expected) diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_data_paginator.py --- a/tests/test_data_paginator.py Tue Nov 21 11:00:06 2017 -0800 +++ 
b/tests/test_data_paginator.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,49 +1,32 @@ import math -import mock import pytest from piecrust.data.paginator import Paginator -from piecrust.sources.interfaces import IPaginationSource -class MockSource(list, IPaginationSource): +class MockSource(list): def __init__(self, count): for i in range(count): self.append('item %d' % i) - def getItemsPerPage(self): - return 5 - - def getSourceIterator(self): - return None - - def getSorterIterator(self, it): - return None - - def getTailIterator(self, it): - return None - - def getPaginationFilter(self, page): - return None - @pytest.mark.parametrize('uri, page_num, count', [ - ('', 1, 0), - ('', 1, 4), - ('', 1, 5), - ('', 1, 8), - ('', 1, 14), - ('', 2, 8), - ('', 2, 14), - ('', 3, 14), - ('blog', 1, 0), - ('blog', 1, 4), - ('blog', 1, 5), - ('blog', 1, 8), - ('blog', 1, 14), - ('blog', 2, 8), - ('blog', 2, 14), - ('blog', 3, 14) - ]) + ('', 1, 0), + ('', 1, 4), + ('', 1, 5), + ('', 1, 8), + ('', 1, 14), + ('', 2, 8), + ('', 2, 14), + ('', 3, 14), + ('blog', 1, 0), + ('blog', 1, 4), + ('blog', 1, 5), + ('blog', 1, 8), + ('blog', 1, 14), + ('blog', 2, 8), + ('blog', 2, 14), + ('blog', 3, 14) +]) def test_paginator(uri, page_num, count): def _get_mock_uri(sub_num): res = uri @@ -54,7 +37,8 @@ return res source = MockSource(count) - p = Paginator(None, source, page_num=page_num) + p = Paginator(source, None, page_num) + p._items_per_page = 5 p._getPageUri = _get_mock_uri if count <= 5: @@ -81,12 +65,12 @@ assert p.prev_page == uri else: pp = str(page_num - 1) if uri == '' else ( - '%s/%d' % (uri, page_num - 1)) + '%s/%d' % (uri, page_num - 1)) assert p.prev_page == pp assert p.this_page_number == page_num tp = str(page_num) if uri == '' else ( - '%s/%d' % (uri, page_num)) + '%s/%d' % (uri, page_num)) assert p.this_page == tp if page_num * 5 > count: @@ -95,7 +79,7 @@ else: assert p.next_page_number == page_num + 1 np = str(page_num + 1) if uri == '' else ( - '%s/%d' % (uri, page_num + 
1)) + '%s/%d' % (uri, page_num + 1)) assert p.next_page == np assert p.total_post_count == count @@ -118,7 +102,8 @@ nums = list(range(1, to_add + 1)) + nums else: to_add = min(to_add, page_count - nums[-1]) - nums = nums + list(range(nums[-1] + 1, nums[-1] + to_add + 1)) + nums = nums + list(range(nums[-1] + 1, + nums[-1] + to_add + 1)) assert nums == p.all_page_numbers(radius) itp = count @@ -130,7 +115,7 @@ assert p.items_this_page == itp indices = list(range(count)) - indices = indices[(page_num - 1) * 5 : (page_num - 1) * 5 + itp] + indices = indices[(page_num - 1) * 5:(page_num - 1) * 5 + itp] expected = list(['item %d' % i for i in indices]) items = list(p.items) assert items == expected diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_data_provider.py --- a/tests/test_data_provider.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -from piecrust.rendering import QualifiedPage, PageRenderingContext, render_page -from .mockutil import mock_fs, mock_fs_scope - - -def test_blog_provider(): - fs = (mock_fs() - .withConfig() - .withPage('posts/2015-03-01_one.md', - {'title': 'One', 'tags': ['Foo']}) - .withPage('posts/2015-03-02_two.md', - {'title': 'Two', 'tags': ['Foo']}) - .withPage('posts/2015-03-03_three.md', - {'title': 'Three', 'tags': ['Bar']}) - .withPage('pages/tags.md', - {'format': 'none', 'layout': 'none'}, - "{%for c in blog.tags%}\n" - "{{c.name}} ({{c.post_count}})\n" - "{%endfor%}\n")) - with mock_fs_scope(fs): - app = fs.getApp() - page = app.getSource('pages').getPage({'slug': 'tags'}) - route = app.getSourceRoute('pages', None) - route_metadata = {'slug': 'tags'} - qp = QualifiedPage(page, route, route_metadata) - ctx = PageRenderingContext(qp) - rp = render_page(ctx) - expected = "\nBar (1)\n\nFoo (2)\n" - assert rp.content == expected - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_dataproviders_blog.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test_dataproviders_blog.py Tue Nov 
21 22:07:12 2017 -0800 @@ -0,0 +1,123 @@ +from .mockutil import mock_fs, mock_fs_scope +from .rdrutil import render_simple_page + + +def _get_post_tokens(i, posts_per_month=2, posts_per_year=5, first_year=2001): + year = first_year + int(i / posts_per_year) + i_in_year = i % posts_per_year + month = int(i_in_year / posts_per_month) + 1 + day = i_in_year % posts_per_month + 1 + return (year, month, day, i + 1) + + +def test_blog_provider_archives(): + fs = (mock_fs() + .withConfig({ + 'site': { + 'default_layout': 'none', + 'default_format': 'none' + } + }) + .withPages( + 20, + lambda i: ('posts/%04d-%02d-%02d_post-%d.md' % + _get_post_tokens(i)), + lambda i: {'title': "Post %02d" % (i + 1), 'format': 'none'}, + lambda i: "This is post %02d" % (i + 1)) + .withPage('pages/allposts.html', + {'layout': 'none'}, + "{%for p in blog.posts-%}\n" + "{{p.title}}\n" + "{%endfor%}\n") + .withPage('pages/allyears.html', + {'layout': 'none'}, + "{%for y in blog.years-%}\n" + "YEAR={{y}}\n" + "{%for p in y.posts-%}\n" + "{{p.title}}\n" + "{%endfor%}\n" + "{%endfor%}") + .withFile('kitchen/templates/_year.html', + "YEAR={{year}}\n" + "{%for p in archives-%}\n" + "{{p.title}}\n" + "{%endfor%}\n" + "\n" + "{%for m in monthly_archives-%}\n" + "MONTH={{m.timestamp|date('%m')}}\n" + "{%for p in m.posts-%}\n" + "{{p.title}}\n" + "{%endfor%}\n" + "{%endfor%}")) + + with mock_fs_scope(fs): + fs.runChef('bake', '-o', fs.path('counter')) + + # Check `allposts`. + # Should have all the posts. Duh. + expected = '\n'.join(map(lambda i: "Post %02d" % i, + range(20, 0, -1))) + '\n' + actual = fs.getFileEntry('counter/allposts.html') + assert expected == actual + + # Check `allyears`. + # Should have all the years, each with 5 posts in reverse + # chronological order. 
+ expected = '' + cur_index = 20 + for y in range(2004, 2000, -1): + expected += ('YEAR=%04d\n' % y) + '\n'.join( + map(lambda i: "Post %02d" % i, + range(cur_index, cur_index - 5, -1))) + '\n\n' + cur_index -= 5 + actual = fs.getFileEntry('counter/allyears.html') + assert expected == actual + + # Check each yearly page. + # Should have both the posts for that year (5 posts) in + # chronological order, followed by the months for that year + # (3 months) and the posts in each month (2, 2, and 1). + cur_index = 1 + for y in range(2001, 2005): + orig_index = cur_index + expected = ('YEAR=%04d\n' % y) + '\n'.join( + map(lambda i: "Post %02d" % i, + range(cur_index, cur_index + 5))) + '\n' + expected += "\n\n" + orig_final_index = cur_index + cur_index = orig_index + for m in range(1, 4): + expected += 'MONTH=%02d\n' % m + expected += '\n'.join( + map(lambda i: "Post %02d" % i, + range(cur_index, + min(cur_index + 2, orig_index + 5)))) + '\n' + expected += '\n' + cur_index += 2 + cur_index = orig_final_index + + actual = fs.getFileEntry('counter/archives/%04d.html' % y) + assert expected == actual + cur_index += 5 + + +def test_blog_provider_tags(): + fs = (mock_fs() + .withConfig() + .withPage('posts/2015-03-01_one.md', + {'title': 'One', 'tags': ['Foo']}) + .withPage('posts/2015-03-02_two.md', + {'title': 'Two', 'tags': ['Foo']}) + .withPage('posts/2015-03-03_three.md', + {'title': 'Three', 'tags': ['Bar']}) + .withPage('pages/tags.md', + {'format': 'none', 'layout': 'none'}, + "{%for c in blog.tags%}\n" + "{{c.name}} ({{c.post_count}})\n" + "{%endfor%}\n")) + with mock_fs_scope(fs): + page = fs.getSimplePage('tags.md') + actual = render_simple_page(page) + expected = "\nBar (1)\n\nFoo (2)\n" + assert actual == expected + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_dataproviders_pageiterator.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test_dataproviders_pageiterator.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,89 @@ +import mock +from 
piecrust.dataproviders.pageiterator import PageIterator +from piecrust.page import Page, PageConfiguration + + +def test_skip(): + it = PageIterator(range(12)) + it.skip(5) + assert it.total_count == 12 + assert len(it) == 7 + assert list(it) == list(range(5, 12)) + + +def test_limit(): + it = PageIterator(range(12)) + it.limit(4) + assert it.total_count == 12 + assert len(it) == 4 + assert list(it) == list(range(4)) + + +def test_slice(): + it = PageIterator(range(12)) + it.slice(3, 4) + assert it.total_count == 12 + assert len(it) == 4 + assert list(it) == list(range(3, 7)) + + +def test_natural_sort(): + it = PageIterator([4, 3, 1, 2, 0]) + it.sort() + assert it.total_count == 5 + assert len(it) == 5 + assert list(it) == list(range(5)) + + +def test_natural_sort_reversed(): + it = PageIterator([4, 3, 1, 2, 0]) + it.sort(reverse=True) + assert it.total_count == 5 + assert len(it) == 5 + assert list(it) == list(reversed(range(5))) + + +class TestItem(object): + def __init__(self, value): + self.name = str(value) + self.config = {'foo': value} + + def __eq__(self, other): + return other.name == self.name + + +def test_setting_sort(): + it = PageIterator([TestItem(v) for v in [4, 3, 1, 2, 0]]) + it.sort('foo') + assert it.total_count == 5 + assert len(it) == 5 + assert list(it) == [TestItem(v) for v in range(5)] + + +def test_setting_sort_reversed(): + it = PageIterator([TestItem(v) for v in [4, 3, 1, 2, 0]]) + it.sort('foo', reverse=True) + assert it.total_count == 5 + assert len(it) == 5 + assert list(it) == [TestItem(v) for v in reversed(range(5))] + + +def test_filter(): + page = mock.MagicMock(spec=Page) + page.config = PageConfiguration() + page.config.set('threes', {'is_foo': 3}) + it = PageIterator([TestItem(v) for v in [3, 2, 3, 1, 4, 3]], + current_page=page) + it.filter('threes') + assert it.total_count == 3 + assert len(it) == 3 + assert list(it) == [TestItem(3), TestItem(3), TestItem(3)] + + +def test_magic_filter(): + it = PageIterator([TestItem(v) for 
v in [3, 2, 3, 1, 4, 3]]) + it.is_foo(3) + assert it.total_count == 3 + assert len(it) == 3 + assert list(it) == [TestItem(3), TestItem(3), TestItem(3)] + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_fastpickle.py --- a/tests/test_fastpickle.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_fastpickle.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,6 +1,7 @@ +import io import datetime import pytest -from piecrust.fastpickle import pickle, unpickle, pickle_obj, unpickle_obj +from piecrust.fastpickle import pickle, unpickle, pickle_intob, unpickle_fromb class Foo(object): @@ -36,6 +37,13 @@ actual = unpickle(data) assert actual == expected + with io.BytesIO() as buf: + pickle_intob(obj, buf) + size = buf.tell() + buf.seek(0) + actual = unpickle_fromb(buf, size) + assert actual == expected + def test_objects(): f = Foo('foo') @@ -54,11 +62,10 @@ def test_reentrance(): a = {'test_ints': 42, 'test_set': set([1, 2])} - data = pickle_obj(a) - b = unpickle_obj(data) + data = pickle(a) + b = unpickle(data) assert a == b - other_b = unpickle_obj(data) + other_b = unpickle(data) assert a == other_b - c = unpickle_obj(data) + c = unpickle(data) assert a == c - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_page.py --- a/tests/test_page.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_page.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,72 +1,67 @@ import pytest -from piecrust.page import parse_segments - +from piecrust.page import parse_segments, _count_lines test_parse_segments_data1 = ("", {'content': ''}) test_parse_segments_data2 = ("Foo bar", {'content': 'Foo bar'}) -test_parse_segments_data3 = ("""Something that spans +test_parse_segments_data3 = ( + """Something that spans several lines like this""", - {'content': """Something that spans + {'content': """Something that spans several lines like this"""}) -test_parse_segments_data4 = ("""Blah blah +test_parse_segments_data4 = ( + """Blah blah ---foo--- Something else ---bar--- Last thing """, - { - 'content': "Blah blah\n", - 
'foo': "Something else\n", - 'bar': "Last thing\n"}) -test_parse_segments_data5 = ("""Blah blah -<--textile--> -Here's some textile -""", - { - 'content': [ - ("Blah blah\n", None), - ("Here's some textile\n", 'textile')]}) -test_parse_segments_data6 = ("""Blah blah -Whatever -<--textile--> -Oh well, that's good ----foo--- -Another segment -With another... -<--change--> -...of formatting. -""", - { - 'content': [ - ("Blah blah\nWhatever\n", None), - ("Oh well, that's good\n", 'textile')], - 'foo': [ - ("Another segment\nWith another...\n", None), - ("...of formatting.\n", 'change')]}) + { + 'content': "Blah blah\n", + 'foo': "Something else\n", + 'bar': "Last thing\n"}) + @pytest.mark.parametrize('text, expected', [ - test_parse_segments_data1, - test_parse_segments_data2, - test_parse_segments_data3, - test_parse_segments_data4, - test_parse_segments_data5, - test_parse_segments_data6, - ]) + test_parse_segments_data1, + test_parse_segments_data2, + test_parse_segments_data3, + test_parse_segments_data4, +]) def test_parse_segments(text, expected): actual = parse_segments(text) assert actual is not None assert list(actual.keys()) == list(expected.keys()) for key, val in expected.items(): - if isinstance(val, str): - assert len(actual[key].parts) == 1 - assert actual[key].parts[0].content == val - assert actual[key].parts[0].fmt is None - else: - assert len(actual[key].parts) == len(val) - for i, part in enumerate(val): - assert actual[key].parts[i].content == part[0] - assert actual[key].parts[i].fmt == part[1] + assert actual[key].content == val + assert actual[key].fmt is None + +@pytest.mark.parametrize('text, expected', [ + ('', 1), + ('\n', 2), + ('blah foo', 1), + ('blah foo\n', 2), + ('blah foo\nmore here', 2), + ('blah foo\nmore here\n', 3), + ('\nblah foo\nmore here\n', 4), +]) +def test_count_lines(text, expected): + actual = _count_lines(text) + assert actual == expected + + +@pytest.mark.parametrize('text, start, end, expected', [ + ('', 0, -1, 1), + 
('\n', 1, -1, 1), + ('blah foo', 2, 4, 1), + ('blah foo\n', 2, 4, 1), + ('blah foo\nmore here', 4, -1, 2), + ('blah foo\nmore here\n', 10, -1, 2), + ('\nblah foo\nmore here\n', 2, -1, 3), +]) +def test_count_lines_with_offsets(text, start, end, expected): + actual = _count_lines(text, start, end) + assert actual == expected diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_pipelines_asset.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test_pipelines_asset.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,231 @@ +import time +import os.path +import random +import inspect +import pytest +from piecrust.pipelines.asset import get_filtered_processors +from piecrust.pipelines.records import MultiRecord +from piecrust.processing.base import SimpleFileProcessor +from .mockutil import mock_fs, mock_fs_scope + + +class FooProcessor(SimpleFileProcessor): + def __init__(self, name=None, exts=None, open_func=None): + self.PROCESSOR_NAME = name or 'foo' + exts = exts or {'foo': 'foo'} + super().__init__(exts) + self.open_func = open_func or open + + def _doProcess(self, in_path, out_path): + with self.open_func(in_path, 'r') as f: + text = f.read() + with self.open_func(out_path, 'w') as f: + f.write("%s: %s" % (self.PROCESSOR_NAME.upper(), text)) + return True + + +def _get_test_plugin_name(): + return 'foo_%d' % random.randrange(1000) + + +def _get_test_fs(*, plugins=None, processors=None): + plugins = plugins or [] + processors = processors or [] + processors.append('copy') + return (mock_fs() + .withDir('counter') + .withConfig({ + 'site': { + 'plugins': plugins + }, + 'pipelines': { + 'asset': { + 'processors': processors + } + } + })) + + +def _create_test_plugin(fs, plugname, *, foo_name=None, foo_exts=None): + src = [ + 'from piecrust.plugins.base import PieCrustPlugin', + 'from piecrust.processing.base import SimpleFileProcessor'] + + foo_lines = inspect.getsourcelines(FooProcessor) + src += [''] + src += map(lambda l: l.rstrip('\n'), foo_lines[0]) + + src += [ + 
'', + 'class FooPlugin(PieCrustPlugin):', + ' def getProcessors(self):', + ' yield FooProcessor(%s, %s)' % (repr(foo_name), + repr(foo_exts)), + '', + '__piecrust_plugin__ = FooPlugin'] + + print("Creating plugin with source:\n%s" % '\n'.join(src)) + fs.withFile('kitchen/plugins/%s.py' % plugname, '\n'.join(src)) + + +def _bake_assets(fs): + fs.runChef('bake', '-p', 'asset', '-o', fs.path('counter')) + + +def test_empty(): + fs = _get_test_fs() + with mock_fs_scope(fs): + expected = {} + assert expected == fs.getStructure('counter') + _bake_assets(fs) + expected = {} + assert expected == fs.getStructure('counter') + + +def test_one_file(): + fs = (_get_test_fs() + .withFile('kitchen/assets/something.foo', 'A test file.')) + with mock_fs_scope(fs): + expected = {} + assert expected == fs.getStructure('counter') + _bake_assets(fs) + expected = {'something.foo': 'A test file.'} + assert expected == fs.getStructure('counter') + + +def test_one_level_dirtyness(): + fs = (_get_test_fs() + .withFile('kitchen/assets/blah.foo', 'A test file.')) + with mock_fs_scope(fs): + _bake_assets(fs) + expected = {'blah.foo': 'A test file.'} + assert expected == fs.getStructure('counter') + mtime = os.path.getmtime(fs.path('/counter/blah.foo')) + assert abs(time.time() - mtime) <= 2 + + time.sleep(1) + _bake_assets(fs) + assert expected == fs.getStructure('counter') + assert mtime == os.path.getmtime(fs.path('/counter/blah.foo')) + + time.sleep(1) + fs.withFile('kitchen/assets/blah.foo', 'A new test file.') + _bake_assets(fs) + expected = {'blah.foo': 'A new test file.'} + assert expected == fs.getStructure('counter') + assert mtime < os.path.getmtime(fs.path('/counter/blah.foo')) + + +def test_two_levels_dirtyness(): + plugname = _get_test_plugin_name() + fs = (_get_test_fs(plugins=[plugname], processors=['foo']) + .withFile('kitchen/assets/blah.foo', 'A test file.')) + _create_test_plugin(fs, plugname, foo_exts={'foo': 'bar'}) + with mock_fs_scope(fs): + _bake_assets(fs) + expected = 
{'blah.bar': 'FOO: A test file.'} + assert expected == fs.getStructure('counter') + mtime = os.path.getmtime(fs.path('/counter/blah.bar')) + assert abs(time.time() - mtime) <= 2 + + time.sleep(1) + _bake_assets(fs) + assert expected == fs.getStructure('counter') + assert mtime == os.path.getmtime(fs.path('/counter/blah.bar')) + + time.sleep(1) + fs.withFile('kitchen/assets/blah.foo', 'A new test file.') + _bake_assets(fs) + expected = {'blah.bar': 'FOO: A new test file.'} + assert expected == fs.getStructure('counter') + assert mtime < os.path.getmtime(fs.path('/counter/blah.bar')) + + +def test_removed(): + fs = (_get_test_fs() + .withFile('kitchen/assets/blah1.foo', 'A test file.') + .withFile('kitchen/assets/blah2.foo', 'Ooops')) + with mock_fs_scope(fs): + expected = { + 'blah1.foo': 'A test file.', + 'blah2.foo': 'Ooops'} + assert expected == fs.getStructure('kitchen/assets') + _bake_assets(fs) + assert expected == fs.getStructure('counter') + + time.sleep(1) + os.remove(fs.path('/kitchen/assets/blah2.foo')) + expected = { + 'blah1.foo': 'A test file.'} + assert expected == fs.getStructure('kitchen/assets') + _bake_assets(fs) + assert expected == fs.getStructure('counter') + + +def test_record_version_change(): + plugname = _get_test_plugin_name() + fs = (_get_test_fs(plugins=[plugname], processors=['foo']) + .withFile('kitchen/assets/blah.foo', 'A test file.')) + _create_test_plugin(fs, plugname) + with mock_fs_scope(fs): + time.sleep(1) + _bake_assets(fs) + time.sleep(0.1) + mtime = os.path.getmtime(fs.path('counter/blah.foo')) + + time.sleep(1) + _bake_assets(fs) + time.sleep(0.1) + assert mtime == os.path.getmtime(fs.path('counter/blah.foo')) + + MultiRecord.RECORD_VERSION += 1 + try: + time.sleep(1) + _bake_assets(fs) + time.sleep(0.1) + assert mtime < os.path.getmtime(fs.path('counter/blah.foo')) + finally: + MultiRecord.RECORD_VERSION -= 1 + + +@pytest.mark.parametrize('patterns, expected', [ + (['_'], + {'something.html': 'A test file.'}), + (['html'], 
+ {}), + (['/^_/'], + {'something.html': 'A test file.', + 'foo': {'_important.html': 'Important!'}}) +]) +def test_ignore_pattern(patterns, expected): + fs = (_get_test_fs() + .withFile('kitchen/assets/something.html', 'A test file.') + .withFile('kitchen/assets/_hidden.html', 'Shhh') + .withFile('kitchen/assets/foo/_important.html', 'Important!')) + fs.withConfig({'pipelines': {'asset': {'ignore': patterns}}}) + with mock_fs_scope(fs): + assert {} == fs.getStructure('counter') + _bake_assets(fs) + assert expected == fs.getStructure('counter') + + +@pytest.mark.parametrize('names, expected', [ + ('all', ['cleancss', 'compass', 'copy', 'concat', 'less', 'requirejs', + 'sass', 'sitemap', 'uglifyjs', 'pygments_style']), + ('all -sitemap', ['cleancss', 'copy', 'compass', 'concat', 'less', + 'requirejs', 'sass', 'uglifyjs', 'pygments_style']), + ('-sitemap -less -sass all', ['cleancss', 'copy', 'compass', 'concat', + 'requirejs', 'uglifyjs', + 'pygments_style']), + ('copy', ['copy']), + ('less sass', ['less', 'sass']) +]) +def test_filter_processor(names, expected): + fs = mock_fs().withConfig() + with mock_fs_scope(fs): + app = fs.getApp() + processors = app.plugin_loader.getProcessors() + procs = get_filtered_processors(processors, names) + actual = [p.PROCESSOR_NAME for p in procs] + assert sorted(actual) == sorted(expected) + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_pipelines_page.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/tests/test_pipelines_page.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,101 @@ +import time +import os.path +import urllib.parse +import pytest +from piecrust.pipelines.records import MultiRecord +from piecrust.pipelines._pagebaker import get_output_path +from .mockutil import get_mock_app, mock_fs, mock_fs_scope + + +@pytest.mark.parametrize('uri, pretty, expected', [ + # Pretty URLs + ('', True, 'index.html'), + ('2', True, '2/index.html'), + ('foo', True, 'foo/index.html'), + ('foo/2', True, 'foo/2/index.html'), + ('foo/bar', 
True, 'foo/bar/index.html'), + ('foo/bar/2', True, 'foo/bar/2/index.html'), + ('foo.ext', True, 'foo.ext/index.html'), + ('foo.ext/2', True, 'foo.ext/2/index.html'), + ('foo/bar.ext', True, 'foo/bar.ext/index.html'), + ('foo/bar.ext/2', True, 'foo/bar.ext/2/index.html'), + ('foo.bar.ext', True, 'foo.bar.ext/index.html'), + ('foo.bar.ext/2', True, 'foo.bar.ext/2/index.html'), + # Ugly URLs + ('', False, 'index.html'), + ('2.html', False, '2.html'), + ('foo.html', False, 'foo.html'), + ('foo/2.html', False, 'foo/2.html'), + ('foo/bar.html', False, 'foo/bar.html'), + ('foo/bar/2.html', False, 'foo/bar/2.html'), + ('foo.ext', False, 'foo.ext'), + ('foo/2.ext', False, 'foo/2.ext'), + ('foo/bar.ext', False, 'foo/bar.ext'), + ('foo/bar/2.ext', False, 'foo/bar/2.ext'), + ('foo.bar.ext', False, 'foo.bar.ext'), + ('foo.bar/2.ext', False, 'foo.bar/2.ext') +]) +def test_get_output_path(uri, pretty, expected): + app = get_mock_app() + if pretty: + app.config.set('site/pretty_urls', True) + assert app.config.get('site/pretty_urls') == pretty + + out_dir = '/destination' + + for site_root in ['/', '/whatever/', '/~johndoe/']: + app.config.set('site/root', urllib.parse.quote(site_root)) + path = get_output_path(app, out_dir, + urllib.parse.quote(site_root) + uri, + pretty) + expected = os.path.normpath( + os.path.join('/destination', expected)) + assert expected == path + + +def test_removed(): + fs = (mock_fs() + .withConfig() + .withPage('pages/foo.md', {'layout': 'none', 'format': 'none'}, + "a foo page") + .withPage('pages/_index.md', {'layout': 'none', 'format': 'none'}, + "something")) + with mock_fs_scope(fs): + fs.runChef('bake') + structure = fs.getStructure('kitchen/_counter') + assert structure == { + 'foo.html': 'a foo page', + 'index.html': 'something'} + + os.remove(fs.path('kitchen/pages/foo.md')) + fs.runChef('bake') + structure = fs.getStructure('kitchen/_counter') + assert structure == { + 'index.html': 'something'} + + +def test_record_version_change(): + fs = 
(mock_fs() + .withConfig() + .withPage('pages/foo.md', {'layout': 'none', 'format': 'none'}, + 'a foo page')) + with mock_fs_scope(fs): + time.sleep(1) + fs.runChef('bake', '-o', fs.path('counter')) + time.sleep(0.1) + mtime = os.path.getmtime(fs.path('counter/foo.html')) + + time.sleep(1) + fs.runChef('bake', '-o', fs.path('counter')) + time.sleep(0.1) + assert mtime == os.path.getmtime(fs.path('counter/foo.html')) + + MultiRecord.RECORD_VERSION += 1 + try: + time.sleep(1) + fs.runChef('bake', '-o', fs.path('counter')) + time.sleep(0.1) + assert mtime < os.path.getmtime(fs.path('counter/foo.html')) + finally: + MultiRecord.RECORD_VERSION -= 1 + diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_processing_base.py --- a/tests/test_processing_base.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,221 +0,0 @@ -import time -import os.path -import shutil -import pytest -from piecrust.processing.base import SimpleFileProcessor -from piecrust.processing.pipeline import ProcessorPipeline -from piecrust.processing.records import ProcessorPipelineRecord -from piecrust.processing.worker import get_filtered_processors -from .mockutil import mock_fs, mock_fs_scope - - -class FooProcessor(SimpleFileProcessor): - def __init__(self, exts=None, open_func=None): - exts = exts or {'foo', 'foo'} - super(FooProcessor, self).__init__({exts[0]: exts[1]}) - self.PROCESSOR_NAME = exts[0] - self.open_func = open_func or open - - def _doProcess(self, in_path, out_path): - with self.open_func(in_path, 'r') as f: - text = f.read() - with self.open_func(out_path, 'w') as f: - f.write("%s: %s" % (self.PROCESSOR_NAME.upper(), text)) - return True - - -class NoopProcessor(SimpleFileProcessor): - def __init__(self, exts): - super(NoopProcessor, self).__init__({exts[0]: exts[1]}) - self.PROCESSOR_NAME = exts[0] - self.processed = [] - - def _doProcess(self, in_path, out_path): - self.processed.append(in_path) - shutil.copyfile(in_path, out_path) - return True - - 
-def _get_pipeline(fs, app=None): - app = app or fs.getApp() - return ProcessorPipeline(app, fs.path('counter')) - - -def test_empty(): - fs = (mock_fs() - .withDir('counter') - .withConfig()) - with mock_fs_scope(fs): - pp = _get_pipeline(fs) - pp.enabled_processors = ['copy'] - expected = {} - assert expected == fs.getStructure('counter') - pp.run() - expected = {} - assert expected == fs.getStructure('counter') - - -def test_one_file(): - fs = (mock_fs() - .withDir('counter') - .withConfig() - .withFile('kitchen/assets/something.html', 'A test file.')) - with mock_fs_scope(fs): - pp = _get_pipeline(fs) - pp.enabled_processors = ['copy'] - expected = {} - assert expected == fs.getStructure('counter') - pp.run() - expected = {'something.html': 'A test file.'} - assert expected == fs.getStructure('counter') - - -def test_one_level_dirtyness(): - fs = (mock_fs() - .withConfig() - .withFile('kitchen/assets/blah.foo', 'A test file.')) - with mock_fs_scope(fs): - pp = _get_pipeline(fs) - pp.enabled_processors = ['copy'] - pp.run() - expected = {'blah.foo': 'A test file.'} - assert expected == fs.getStructure('counter') - mtime = os.path.getmtime(fs.path('/counter/blah.foo')) - assert abs(time.time() - mtime) <= 2 - - time.sleep(1) - pp.run() - assert expected == fs.getStructure('counter') - assert mtime == os.path.getmtime(fs.path('/counter/blah.foo')) - - time.sleep(1) - fs.withFile('kitchen/assets/blah.foo', 'A new test file.') - pp.run() - expected = {'blah.foo': 'A new test file.'} - assert expected == fs.getStructure('counter') - assert mtime < os.path.getmtime(fs.path('/counter/blah.foo')) - - -def test_two_levels_dirtyness(): - fs = (mock_fs() - .withConfig() - .withFile('kitchen/assets/blah.foo', 'A test file.')) - with mock_fs_scope(fs): - pp = _get_pipeline(fs) - pp.enabled_processors = ['copy'] - pp.additional_processors_factories = [ - lambda: FooProcessor(('foo', 'bar'))] - pp.run() - expected = {'blah.bar': 'FOO: A test file.'} - assert expected == 
fs.getStructure('counter') - mtime = os.path.getmtime(fs.path('/counter/blah.bar')) - assert abs(time.time() - mtime) <= 2 - - time.sleep(1) - pp.run() - assert expected == fs.getStructure('counter') - assert mtime == os.path.getmtime(fs.path('/counter/blah.bar')) - - time.sleep(1) - fs.withFile('kitchen/assets/blah.foo', 'A new test file.') - pp.run() - expected = {'blah.bar': 'FOO: A new test file.'} - assert expected == fs.getStructure('counter') - assert mtime < os.path.getmtime(fs.path('/counter/blah.bar')) - - -def test_removed(): - fs = (mock_fs() - .withConfig() - .withFile('kitchen/assets/blah1.foo', 'A test file.') - .withFile('kitchen/assets/blah2.foo', 'Ooops')) - with mock_fs_scope(fs): - expected = { - 'blah1.foo': 'A test file.', - 'blah2.foo': 'Ooops'} - assert expected == fs.getStructure('kitchen/assets') - pp = _get_pipeline(fs) - pp.enabled_processors = ['copy'] - pp.run() - assert expected == fs.getStructure('counter') - - time.sleep(1) - os.remove(fs.path('/kitchen/assets/blah2.foo')) - expected = { - 'blah1.foo': 'A test file.'} - assert expected == fs.getStructure('kitchen/assets') - pp.run() - assert expected == fs.getStructure('counter') - - -def test_record_version_change(): - fs = (mock_fs() - .withConfig() - .withFile('kitchen/assets/blah.foo', 'A test file.')) - with mock_fs_scope(fs): - pp = _get_pipeline(fs) - pp.enabled_processors = ['copy'] - pp.additional_processors_factories = [ - lambda: NoopProcessor(('foo', 'foo'))] - pp.run() - assert os.path.exists(fs.path('/counter/blah.foo')) is True - mtime = os.path.getmtime(fs.path('/counter/blah.foo')) - - time.sleep(1) - pp.run() - assert mtime == os.path.getmtime(fs.path('/counter/blah.foo')) - - time.sleep(1) - ProcessorPipelineRecord.RECORD_VERSION += 1 - try: - pp.run() - assert mtime < os.path.getmtime(fs.path('/counter/blah.foo')) - finally: - ProcessorPipelineRecord.RECORD_VERSION -= 1 - - -@pytest.mark.parametrize('patterns, expected', [ - (['_'], - {'something.html': 'A test 
file.'}), - (['html'], - {}), - (['/^_/'], - {'something.html': 'A test file.', - 'foo': {'_important.html': 'Important!'}}) - ]) -def test_ignore_pattern(patterns, expected): - fs = (mock_fs() - .withDir('counter') - .withConfig() - .withFile('kitchen/assets/something.html', 'A test file.') - .withFile('kitchen/assets/_hidden.html', 'Shhh') - .withFile('kitchen/assets/foo/_important.html', 'Important!')) - with mock_fs_scope(fs): - pp = _get_pipeline(fs) - pp.addIgnorePatterns(patterns) - pp.enabled_processors = ['copy'] - assert {} == fs.getStructure('counter') - pp.run() - assert expected == fs.getStructure('counter') - - -@pytest.mark.parametrize('names, expected', [ - ('all', ['cleancss', 'compass', 'copy', 'concat', 'less', 'requirejs', - 'sass', 'sitemap', 'uglifyjs', 'pygments_style']), - ('all -sitemap', ['cleancss', 'copy', 'compass', 'concat', 'less', - 'requirejs', 'sass', 'uglifyjs', 'pygments_style']), - ('-sitemap -less -sass all', ['cleancss', 'copy', 'compass', 'concat', - 'requirejs', 'uglifyjs', - 'pygments_style']), - ('copy', ['copy']), - ('less sass', ['less', 'sass']) - ]) -def test_filter_processor(names, expected): - fs = mock_fs().withConfig() - with mock_fs_scope(fs): - app = fs.getApp() - processors = app.plugin_loader.getProcessors() - procs = get_filtered_processors(processors, names) - actual = [p.PROCESSOR_NAME for p in procs] - assert sorted(actual) == sorted(expected) - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_processing_tree.py --- a/tests/test_processing_tree.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_processing_tree.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,5 +1,7 @@ -from piecrust.processing.base import CopyFileProcessor, SimpleFileProcessor -from piecrust.processing.tree import ProcessingTreeBuilder, ProcessingTreeNode +from piecrust.processing.base import SimpleFileProcessor +from piecrust.processing.copy import CopyFileProcessor +from piecrust.pipelines._proctree import ( + ProcessingTreeBuilder, 
ProcessingTreeNode) class MockProcessor(SimpleFileProcessor): diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_routing.py --- a/tests/test_routing.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_routing.py Tue Nov 21 22:07:12 2017 -0800 @@ -2,7 +2,7 @@ import mock import pytest from piecrust.routing import Route, RouteParameter -from piecrust.sources.base import PageSource +from piecrust.sources.base import ContentSource from .mockutil import get_mock_app @@ -20,51 +20,48 @@ else: route_params.append(RouteParameter(p, RouteParameter.TYPE_STRING)) - src = mock.MagicMock(spec=PageSource) + src = mock.MagicMock(spec=ContentSource) src.name = name src.getSupportedRouteParameters = lambda: route_params return src @pytest.mark.parametrize( - 'config, metadata, params, expected', - [ - ({'url': '/%foo%'}, - {'foo': 'bar'}, ['foo'], True), - ({'url': '/%foo%'}, - {'zoo': 'zar', 'foo': 'bar'}, ['foo'], True), - ({'url': '/%foo%'}, - {'zoo': 'zar'}, ['foo'], False), - ({'url': '/%foo%/%zoo%'}, - {'zoo': 'zar'}, ['foo', 'zoo'], False) - ]) -def test_matches_metadata(config, metadata, params, expected): + 'config, params, uri_params, expected', + [ + ({'url': '/%foo%'}, ['foo'], {'foo': 'bar'}, True), + ({'url': '/%foo%'}, ['foo'], {'zoo': 'zar', 'foo': 'bar'}, True), + ({'url': '/%foo%'}, ['foo'], {'zoo': 'zar'}, False), + ({'url': '/%foo%/%zoo%'}, ['foo', 'zoo'], {'zoo': 'zar'}, False) + ]) +def test_matches_parameters(config, params, uri_params, expected): app = get_mock_app() app.config.set('site/root', '/') app.sources = [_getMockSource('blah', params)] config.setdefault('source', 'blah') route = Route(app, config) - m = route.matchesMetadata(metadata) + m = route.matchesParameters(uri_params) assert m == expected @pytest.mark.parametrize( - 'site_root, route_pattern, params, expected_func_parameters', - [ - ('/', '/%foo%', ['foo'], ['foo']), - ('/', '/%foo%', [('foo', 'path')], ['foo']), - ('/', '/%foo%/%bar%', ['foo', 'bar'], ['foo', 'bar']), - ('/', '/%foo%/%bar%', 
['foo', ('bar', 'path')], ['foo', 'bar']), - ('/something', '/%foo%', ['foo'], ['foo']), - ('/something', '/%foo%', [('foo', 'path')], ['foo']), - ('/something', '/%foo%/%bar%', ['foo', 'bar'], ['foo', 'bar']), - ('/something', '/%foo%/%bar%', ['foo', ('bar', 'path')], ['foo', 'bar']), - ('/~johndoe', '/%foo%', ['foo'], ['foo']), - ('/~johndoe', '/%foo%', [('foo', 'path')], ['foo']), - ('/~johndoe', '/%foo%/%bar%', ['foo', 'bar'], ['foo', 'bar']), - ('/~johndoe', '/%foo%/%bar%', ['foo', ('bar', 'path')], ['foo', 'bar']) - ]) + 'site_root, route_pattern, params, expected_func_parameters', + [ + ('/', '/%foo%', ['foo'], ['foo']), + ('/', '/%foo%', [('foo', 'path')], ['foo']), + ('/', '/%foo%/%bar%', ['foo', 'bar'], ['foo', 'bar']), + ('/', '/%foo%/%bar%', ['foo', ('bar', 'path')], ['foo', 'bar']), + ('/something', '/%foo%', ['foo'], ['foo']), + ('/something', '/%foo%', [('foo', 'path')], ['foo']), + ('/something', '/%foo%/%bar%', ['foo', 'bar'], ['foo', 'bar']), + ('/something', '/%foo%/%bar%', ['foo', ('bar', 'path')], + ['foo', 'bar']), + ('/~johndoe', '/%foo%', ['foo'], ['foo']), + ('/~johndoe', '/%foo%', [('foo', 'path')], ['foo']), + ('/~johndoe', '/%foo%/%bar%', ['foo', 'bar'], ['foo', 'bar']), + ('/~johndoe', '/%foo%/%bar%', ['foo', ('bar', 'path')], ['foo', 'bar']) + ]) def test_required_metadata(site_root, route_pattern, params, expected_func_parameters): app = get_mock_app() @@ -77,95 +74,95 @@ @pytest.mark.parametrize( - 'site_root, config, params, uri, expected_match', - [ - ('/', {'url': '/%foo%'}, - ['foo'], - 'something', - {'foo': 'something'}), - ('/', {'url': '/%foo%'}, - ['foo'], - 'something/other', - None), - ('/', {'url': '/%foo%'}, - [('foo', 'path')], - 'something/other', - {'foo': 'something/other'}), - ('/', {'url': '/%foo%'}, - [('foo', 'path')], - '', - {'foo': ''}), - ('/', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix/something/other', - {'foo': 'something/other'}), - ('/', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 
'prefix/', - {'foo': ''}), - ('/', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix', - {'foo': ''}), + 'site_root, config, params, uri, expected_match', + [ + ('/', {'url': '/%foo%'}, + ['foo'], + 'something', + {'foo': 'something'}), + ('/', {'url': '/%foo%'}, + ['foo'], + 'something/other', + None), + ('/', {'url': '/%foo%'}, + [('foo', 'path')], + 'something/other', + {'foo': 'something/other'}), + ('/', {'url': '/%foo%'}, + [('foo', 'path')], + '', + {'foo': ''}), + ('/', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix/something/other', + {'foo': 'something/other'}), + ('/', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix/', + {'foo': ''}), + ('/', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix', + {'foo': ''}), - ('/blah', {'url': '/%foo%'}, - ['foo'], - 'something', - {'foo': 'something'}), - ('/blah', {'url': '/%foo%'}, - ['foo'], - 'something/other', - None), - ('/blah', {'url': '/%foo%'}, - [('foo', 'path')], - 'something/other', - {'foo': 'something/other'}), - ('/blah', {'url': '/%foo%'}, - [('foo', 'path')], - '', - {'foo': ''}), - ('/blah', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix/something/other', - {'foo': 'something/other'}), - ('/blah', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix/', - {'foo': ''}), - ('/blah', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix', - {'foo': ''}), + ('/blah', {'url': '/%foo%'}, + ['foo'], + 'something', + {'foo': 'something'}), + ('/blah', {'url': '/%foo%'}, + ['foo'], + 'something/other', + None), + ('/blah', {'url': '/%foo%'}, + [('foo', 'path')], + 'something/other', + {'foo': 'something/other'}), + ('/blah', {'url': '/%foo%'}, + [('foo', 'path')], + '', + {'foo': ''}), + ('/blah', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix/something/other', + {'foo': 'something/other'}), + ('/blah', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix/', + {'foo': ''}), + ('/blah', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix', + 
{'foo': ''}), - ('/~johndoe', {'url': '/%foo%'}, - ['foo'], - 'something', - {'foo': 'something'}), - ('/~johndoe', {'url': '/%foo%'}, - ['foo'], - 'something/other', - None), - ('/~johndoe', {'url': '/%foo%'}, - [('foo', 'path')], - 'something/other', - {'foo': 'something/other'}), - ('/~johndoe', {'url': '/%foo%'}, - [('foo', 'path')], - '', - {'foo': ''}), - ('/~johndoe', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix/something/other', - {'foo': 'something/other'}), - ('/~johndoe', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix/', - {'foo': ''}), - ('/~johndoe', {'url': '/prefix/%foo%'}, - [('foo', 'path')], - 'prefix', - {'foo': ''}), - ]) + ('/~johndoe', {'url': '/%foo%'}, + ['foo'], + 'something', + {'foo': 'something'}), + ('/~johndoe', {'url': '/%foo%'}, + ['foo'], + 'something/other', + None), + ('/~johndoe', {'url': '/%foo%'}, + [('foo', 'path')], + 'something/other', + {'foo': 'something/other'}), + ('/~johndoe', {'url': '/%foo%'}, + [('foo', 'path')], + '', + {'foo': ''}), + ('/~johndoe', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix/something/other', + {'foo': 'something/other'}), + ('/~johndoe', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix/', + {'foo': ''}), + ('/~johndoe', {'url': '/prefix/%foo%'}, + [('foo', 'path')], + 'prefix', + {'foo': ''}), + ]) def test_match_uri(site_root, config, params, uri, expected_match): site_root = site_root.rstrip('/') + '/' app = get_mock_app() @@ -180,12 +177,12 @@ @pytest.mark.parametrize( - 'site_root', - [ - ('/'), - ('/whatever'), - ('/~johndoe') - ]) + 'site_root', + [ + ('/'), + ('/whatever'), + ('/~johndoe') + ]) def test_match_uri_requires_absolute_uri(site_root): with pytest.raises(Exception): app = get_mock_app() @@ -198,35 +195,35 @@ @pytest.mark.parametrize( - 'slug, page_num, pretty, expected', - [ - # Pretty URLs - ('', 1, True, ''), - ('', 2, True, '2'), - ('foo', 1, True, 'foo'), - ('foo', 2, True, 'foo/2'), - ('foo/bar', 1, True, 'foo/bar'), - ('foo/bar', 
2, True, 'foo/bar/2'), - ('foo.ext', 1, True, 'foo.ext'), - ('foo.ext', 2, True, 'foo.ext/2'), - ('foo/bar.ext', 1, True, 'foo/bar.ext'), - ('foo/bar.ext', 2, True, 'foo/bar.ext/2'), - ('foo.bar.ext', 1, True, 'foo.bar.ext'), - ('foo.bar.ext', 2, True, 'foo.bar.ext/2'), - # Ugly URLs - ('', 1, False, ''), - ('', 2, False, '2.html'), - ('foo', 1, False, 'foo.html'), - ('foo', 2, False, 'foo/2.html'), - ('foo/bar', 1, False, 'foo/bar.html'), - ('foo/bar', 2, False, 'foo/bar/2.html'), - ('foo.ext', 1, False, 'foo.ext'), - ('foo.ext', 2, False, 'foo/2.ext'), - ('foo/bar.ext', 1, False, 'foo/bar.ext'), - ('foo/bar.ext', 2, False, 'foo/bar/2.ext'), - ('foo.bar.ext', 1, False, 'foo.bar.ext'), - ('foo.bar.ext', 2, False, 'foo.bar/2.ext') - ]) + 'slug, page_num, pretty, expected', + [ + # Pretty URLs + ('', 1, True, ''), + ('', 2, True, '2'), + ('foo', 1, True, 'foo'), + ('foo', 2, True, 'foo/2'), + ('foo/bar', 1, True, 'foo/bar'), + ('foo/bar', 2, True, 'foo/bar/2'), + ('foo.ext', 1, True, 'foo.ext'), + ('foo.ext', 2, True, 'foo.ext/2'), + ('foo/bar.ext', 1, True, 'foo/bar.ext'), + ('foo/bar.ext', 2, True, 'foo/bar.ext/2'), + ('foo.bar.ext', 1, True, 'foo.bar.ext'), + ('foo.bar.ext', 2, True, 'foo.bar.ext/2'), + # Ugly URLs + ('', 1, False, ''), + ('', 2, False, '2.html'), + ('foo', 1, False, 'foo.html'), + ('foo', 2, False, 'foo/2.html'), + ('foo/bar', 1, False, 'foo/bar.html'), + ('foo/bar', 2, False, 'foo/bar/2.html'), + ('foo.ext', 1, False, 'foo.ext'), + ('foo.ext', 2, False, 'foo/2.ext'), + ('foo/bar.ext', 1, False, 'foo/bar.ext'), + ('foo/bar.ext', 2, False, 'foo/bar/2.ext'), + ('foo.bar.ext', 1, False, 'foo.bar.ext'), + ('foo.bar.ext', 2, False, 'foo.bar/2.ext') + ]) def test_get_uri(slug, page_num, pretty, expected): for root in ['/', '/blah/', '/~johndoe/']: app = get_mock_app() diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_serving.py --- a/tests/test_serving.py Tue Nov 21 11:00:06 2017 -0800 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,145 +0,0 @@ 
-import re -import pytest -import mock -from piecrust.data.filters import ( - PaginationFilter, HasFilterClause, IsFilterClause, - page_value_accessor) -from piecrust.rendering import QualifiedPage, PageRenderingContext, render_page -from piecrust.serving.util import find_routes -from piecrust.sources.base import REALM_USER, REALM_THEME -from .mockutil import mock_fs, mock_fs_scope - - -@pytest.mark.parametrize('uri, route_specs, expected', - [ - ('/', - [{'src': 'pages', 'pat': '(?P.*)'}], - [('pages', {'path': '/'})]), - ('/', - [{'src': 'pages', 'pat': '(?P.*)'}, - {'src': 'theme', 'pat': '(?P.*)', 'realm': REALM_THEME}], - [('pages', {'path': '/'}), ('theme', {'path': '/'})]) - ]) -def test_find_routes(uri, route_specs, expected): - routes = [] - for rs in route_specs: - m = mock.Mock() - m.source_name = rs['src'] - m.source_realm = rs.setdefault('realm', REALM_USER) - m.uri_re = re.compile(rs['pat']) - m.matchUri = lambda u: m.uri_re.match(u).groupdict() - routes.append(m) - matching = find_routes(routes, uri) - - assert len(matching) == len(expected) - for i in range(len(matching)): - route, metadata, is_sub_page = matching[i] - exp_source, exp_md = expected[i] - assert route.source_name == exp_source - assert metadata == exp_md - - -@pytest.mark.parametrize( - 'tag, expected_indices', - [ - ('foo', [1, 2, 4, 5, 6]), - ('bar', [2, 3, 4, 6, 8]), - ('whatever', [5, 8]), - ('unique', [7]), - ('missing', None) - ]) -def test_serve_tag_page(tag, expected_indices): - tags = [ - ['foo'], - ['foo', 'bar'], - ['bar'], - ['bar', 'foo'], - ['foo', 'whatever'], - ['foo', 'bar'], - ['unique'], - ['whatever', 'bar']] - - def config_factory(i): - c = {'title': 'Post %d' % (i + 1)} - c['tags'] = list(tags[i]) - return c - - fs = (mock_fs() - .withConfig() - .withPages(8, 'posts/2015-03-{idx1:02}_post{idx1:02}.md', - config_factory) - .withPage('pages/_tag.md', {'layout': 'none', 'format': 'none'}, - "Pages in {{tag}}\n" - "{%for p in pagination.posts -%}\n" - "{{p.title}}\n" 
- "{%endfor%}")) - with mock_fs_scope(fs): - app = fs.getApp() - page = app.getSource('pages').getPage({'slug': '_tag', 'tag': tag}) - route = app.getGeneratorRoute('posts_tags') - assert route is not None - - route_metadata = {'slug': '_tag', 'tag': tag} - qp = QualifiedPage(page, route, route_metadata) - ctx = PageRenderingContext(qp) - route.generator.prepareRenderContext(ctx) - rp = render_page(ctx) - - expected = "Pages in %s\n" % tag - if expected_indices: - for i in reversed(expected_indices): - expected += "Post %d\n" % i - assert expected == rp.content - - -@pytest.mark.parametrize( - 'category, expected_indices', - [ - ('foo', [1, 2, 4]), - ('bar', [3, 6]), - ('missing', None) - ]) -def test_serve_category_page(category, expected_indices): - categories = [ - 'foo', 'foo', 'bar', 'foo', None, 'bar'] - - def config_factory(i): - c = {'title': 'Post %d' % (i + 1)} - if categories[i]: - c['category'] = categories[i] - return c - - fs = (mock_fs() - .withConfig({ - 'site': { - 'taxonomies': { - 'categories': {'term': 'category'} - } - } - }) - .withPages(6, 'posts/2015-03-{idx1:02}_post{idx1:02}.md', - config_factory) - .withPage('pages/_category.md', {'layout': 'none', 'format': 'none'}, - "Pages in {{category}}\n" - "{%for p in pagination.posts -%}\n" - "{{p.title}}\n" - "{%endfor%}")) - with mock_fs_scope(fs): - app = fs.getApp() - page = app.getSource('pages').getPage({'slug': '_category', - 'category': category}) - route = app.getGeneratorRoute('posts_categories') - assert route is not None - - route_metadata = {'slug': '_category', 'category': category} - qp = QualifiedPage(page, route, route_metadata) - ctx = PageRenderingContext(qp) - route.generator.prepareRenderContext(ctx) - rp = render_page(ctx) - - expected = "Pages in %s\n" % category - if expected_indices: - for i in reversed(expected_indices): - expected += "Post %d\n" % i - assert expected == rp.content - diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_serving_util.py --- /dev/null Thu Jan 01 
00:00:00 1970 +0000 +++ b/tests/test_serving_util.py Tue Nov 21 22:07:12 2017 -0800 @@ -0,0 +1,35 @@ +import re +import pytest +import mock +from piecrust.serving.util import find_routes +from piecrust.sources.base import REALM_USER, REALM_THEME + + +@pytest.mark.parametrize( + 'uri, route_specs, expected', + [ + ('/', + [{'src': 'pages', 'pat': '(?P.*)'}], + [('pages', {'path': '/'})]), + ('/', + [{'src': 'pages', 'pat': '(?P.*)'}, + {'src': 'theme', 'pat': '(?P.*)', 'realm': REALM_THEME}], + [('pages', {'path': '/'}), ('theme', {'path': '/'})]) + ]) +def test_find_routes(uri, route_specs, expected): + routes = [] + for rs in route_specs: + m = mock.Mock() + m.source_name = rs['src'] + m.source_realm = rs.setdefault('realm', REALM_USER) + m.uri_re = re.compile(rs['pat']) + m.matchUri = lambda u: m.uri_re.match(u).groupdict() + routes.append(m) + matching = find_routes(routes, uri) + + assert len(matching) == len(expected) + for i in range(len(matching)): + route, metadata, is_sub_page = matching[i] + exp_source, exp_md = expected[i] + assert route.source_name == exp_source + assert metadata == exp_md diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_sources_autoconfig.py --- a/tests/test_sources_autoconfig.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_sources_autoconfig.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,62 +1,55 @@ +import os.path import pytest -from piecrust.sources.base import MODE_PARSING from .mockutil import mock_fs, mock_fs_scope from .pathutil import slashfix @pytest.mark.parametrize( - 'fs_fac, src_config, expected_paths, expected_metadata', - [ - (lambda: mock_fs(), {}, [], []), - (lambda: mock_fs().withPage('test/_index.md'), - {}, - ['_index.md'], - [{'slug': '', 'config': {'foo': []}}]), - (lambda: mock_fs().withPage('test/something.md'), - {}, - ['something.md'], - [{'slug': 'something', 'config': {'foo': []}}]), - (lambda: mock_fs().withPage('test/bar/something.md'), - {}, - ['bar/something.md'], - [{'slug': 'something', 'config': {'foo': 
['bar']}}]), - (lambda: mock_fs().withPage('test/bar1/bar2/something.md'), - {}, - ['bar1/bar2/something.md'], - [{'slug': 'something', 'config': {'foo': ['bar1', 'bar2']}}]), + 'fs_fac, src_config, expected_path, expected_slug, expected_foos', + [ + (lambda: mock_fs(), + {}, + None, '', []), + (lambda: mock_fs().withPage('test/_index.md'), + {}, + '_index.md', '', []), + (lambda: mock_fs().withPage('test/something.md'), + {}, + 'something.md', 'something', []), + (lambda: mock_fs().withPage('test/bar/something.md'), + {}, + 'bar/something.md', 'something', ['bar']), + (lambda: mock_fs().withPage('test/bar1/bar2/something.md'), + {}, + 'bar1/bar2/something.md', 'something', ['bar1', 'bar2']), - (lambda: mock_fs().withPage('test/something.md'), - {'collapse_single_values': True}, - ['something.md'], - [{'slug': 'something', 'config': {'foo': None}}]), - (lambda: mock_fs().withPage('test/bar/something.md'), - {'collapse_single_values': True}, - ['bar/something.md'], - [{'slug': 'something', 'config': {'foo': 'bar'}}]), - (lambda: mock_fs().withPage('test/bar1/bar2/something.md'), - {'collapse_single_values': True}, - ['bar1/bar2/something.md'], - [{'slug': 'something', 'config': {'foo': ['bar1', 'bar2']}}]), + (lambda: mock_fs().withPage('test/something.md'), + {'collapse_single_values': True}, + 'something.md', 'something', None), + (lambda: mock_fs().withPage('test/bar/something.md'), + {'collapse_single_values': True}, + 'bar/something.md', 'something', 'bar'), + (lambda: mock_fs().withPage('test/bar1/bar2/something.md'), + {'collapse_single_values': True}, + 'bar1/bar2/something.md', 'something', ['bar1', 'bar2']), - (lambda: mock_fs().withPage('test/something.md'), - {'only_single_values': True}, - ['something.md'], - [{'slug': 'something', 'config': {'foo': None}}]), - (lambda: mock_fs().withPage('test/bar/something.md'), - {'only_single_values': True}, - ['bar/something.md'], - [{'slug': 'something', 'config': {'foo': 'bar'}}]), - ]) -def 
test_autoconfig_source_factories(fs_fac, src_config, expected_paths, - expected_metadata): + (lambda: mock_fs().withPage('test/something.md'), + {'only_single_values': True}, + 'something.md', 'something', None), + (lambda: mock_fs().withPage('test/bar/something.md'), + {'only_single_values': True}, + 'bar/something.md', 'something', 'bar') + ]) +def test_autoconfig_source_items( + fs_fac, src_config, expected_path, expected_slug, expected_foos): site_config = { - 'sources': { - 'test': {'type': 'autoconfig', - 'setting_name': 'foo'} - }, - 'routes': [ - {'url': '/%slug%', 'source': 'test'}] - } + 'sources': { + 'test': {'type': 'autoconfig', + 'setting_name': 'foo'} + }, + 'routes': [ + {'url': '/%slug%', 'source': 'test'}] + } site_config['sources']['test'].update(src_config) fs = fs_fac() fs.withConfig({'site': site_config}) @@ -64,111 +57,124 @@ with mock_fs_scope(fs): app = fs.getApp() s = app.getSource('test') - facs = list(s.buildPageFactories()) - paths = [f.rel_path for f in facs] - assert paths == slashfix(expected_paths) - metadata = [f.metadata for f in facs] - assert metadata == expected_metadata + items = list(s.getAllContents()) + + if expected_path is None: + assert len(items) == 0 + else: + assert len(items) == 1 + path = os.path.relpath(items[0].spec, s.fs_endpoint_path) + assert path == slashfix(expected_path) + slug = items[0].metadata['route_params']['slug'] + assert slug == expected_slug + foos = items[0].metadata['config']['foo'] + assert foos == expected_foos def test_autoconfig_fails_if_multiple_folders(): site_config = { - 'sources': { - 'test': {'type': 'autoconfig', - 'setting_name': 'foo', - 'only_single_values': True} - } - } + 'sources': { + 'test': {'type': 'autoconfig', + 'setting_name': 'foo', + 'only_single_values': True} + } + } fs = mock_fs().withConfig({'site': site_config}) fs.withPage('test/bar1/bar2/something.md') with mock_fs_scope(fs): app = fs.getApp() s = app.getSource('test') with pytest.raises(Exception): - 
list(s.buildPageFactories()) + list(s.getAllContents()) @pytest.mark.parametrize( - 'fs_fac, expected_paths, expected_metadata', - [ - (lambda: mock_fs(), [], []), - (lambda: mock_fs().withPage('test/_index.md'), - ['_index.md'], - [{'slug': '', - 'config': {'foo': 0, 'foo_trail': [0]}}]), - (lambda: mock_fs().withPage('test/something.md'), - ['something.md'], - [{'slug': 'something', - 'config': {'foo': 0, 'foo_trail': [0]}}]), - (lambda: mock_fs().withPage('test/08_something.md'), - ['08_something.md'], - [{'slug': 'something', - 'config': {'foo': 8, 'foo_trail': [8]}}]), - (lambda: mock_fs().withPage('test/02_there/08_something.md'), - ['02_there/08_something.md'], - [{'slug': 'there/something', - 'config': {'foo': 8, 'foo_trail': [2, 8]}}]), - ]) -def test_ordered_source_factories(fs_fac, expected_paths, expected_metadata): + 'fs_fac, expected_paths, expected_route_params, expected_configs', + [ + (lambda: mock_fs(), [], [], []), + (lambda: mock_fs().withPage('test/_index.md'), + ['_index.md'], + [{'slug': ''}], + [{'foo': 0, 'foo_trail': [0]}]), + (lambda: mock_fs().withPage('test/something.md'), + ['something.md'], + [{'slug': 'something'}], + [{'foo': 0, 'foo_trail': [0]}]), + (lambda: mock_fs().withPage('test/08_something.md'), + ['08_something.md'], + [{'slug': 'something'}], + [{'foo': 8, 'foo_trail': [8]}]), + (lambda: mock_fs().withPage('test/02_there/08_something.md'), + ['02_there/08_something.md'], + [{'slug': 'there/something'}], + [{'foo': 8, 'foo_trail': [2, 8]}]), + ]) +def test_ordered_source_items(fs_fac, expected_paths, expected_route_params, + expected_configs): site_config = { - 'sources': { - 'test': {'type': 'ordered', - 'setting_name': 'foo'} - }, - 'routes': [ - {'url': '/%slug%', 'source': 'test'}] - } + 'sources': { + 'test': {'type': 'ordered', + 'setting_name': 'foo'} + }, + 'routes': [ + {'url': '/%slug%', 'source': 'test'}] + } fs = fs_fac() fs.withConfig({'site': site_config}) fs.withDir('kitchen/test') with mock_fs_scope(fs): app 
= fs.getApp() s = app.getSource('test') - facs = list(s.buildPageFactories()) - paths = [f.rel_path for f in facs] + items = list(s.getAllContents()) + + paths = [os.path.relpath(f.spec, s.fs_endpoint_path) for f in items] assert paths == slashfix(expected_paths) - metadata = [f.metadata for f in facs] - assert metadata == expected_metadata + metadata = [f.metadata['route_params'] for f in items] + assert metadata == expected_route_params + configs = [f.metadata['config'] for f in items] + for c in configs: + c.pop('format') + assert configs == expected_configs @pytest.mark.parametrize( - 'fs_fac, route_path, expected_path, expected_metadata', - [ - (lambda: mock_fs(), 'missing', None, None), - (lambda: mock_fs().withPage('test/something.md'), - 'something', 'something.md', - {'slug': 'something', - 'config': {'foo': 0, 'foo_trail': [0]}}), - (lambda: mock_fs().withPage('test/bar/something.md'), - 'bar/something', 'bar/something.md', - {'slug': 'bar/something', - 'config': {'foo': 0, 'foo_trail': [0]}}), - (lambda: mock_fs().withPage('test/42_something.md'), - 'something', '42_something.md', - {'slug': 'something', - 'config': {'foo': 42, 'foo_trail': [42]}}), - (lambda: mock_fs().withPage('test/bar/42_something.md'), - 'bar/something', 'bar/42_something.md', - {'slug': 'bar/something', - 'config': {'foo': 42, 'foo_trail': [42]}}), + 'fs_fac, route_path, expected_path, expected_metadata', + [ + (lambda: mock_fs(), 'missing', None, None), + (lambda: mock_fs().withPage('test/something.md'), + 'something', 'something.md', + {'slug': 'something', + 'config': {'foo': 0, 'foo_trail': [0]}}), + (lambda: mock_fs().withPage('test/bar/something.md'), + 'bar/something', 'bar/something.md', + {'slug': 'bar/something', + 'config': {'foo': 0, 'foo_trail': [0]}}), + (lambda: mock_fs().withPage('test/42_something.md'), + 'something', '42_something.md', + {'slug': 'something', + 'config': {'foo': 42, 'foo_trail': [42]}}), + (lambda: mock_fs().withPage('test/bar/42_something.md'), + 
'bar/something', 'bar/42_something.md', + {'slug': 'bar/something', + 'config': {'foo': 42, 'foo_trail': [42]}}), - ((lambda: mock_fs() - .withPage('test/42_something.md') - .withPage('test/43_other_something.md')), - 'something', '42_something.md', - {'slug': 'something', - 'config': {'foo': 42, 'foo_trail': [42]}}), - ]) + ((lambda: mock_fs() + .withPage('test/42_something.md') + .withPage('test/43_other_something.md')), + 'something', '42_something.md', + {'slug': 'something', + 'config': {'foo': 42, 'foo_trail': [42]}}), + ]) def test_ordered_source_find(fs_fac, route_path, expected_path, expected_metadata): site_config = { - 'sources': { - 'test': {'type': 'ordered', - 'setting_name': 'foo'} - }, - 'routes': [ - {'url': '/%slug%', 'source': 'test'}] - } + 'sources': { + 'test': {'type': 'ordered', + 'setting_name': 'foo'} + }, + 'routes': [ + {'url': '/%slug%', 'source': 'test'}] + } fs = fs_fac() fs.withConfig({'site': site_config}) fs.withDir('kitchen/test') @@ -176,10 +182,11 @@ app = fs.getApp() s = app.getSource('test') route_metadata = {'slug': route_path} - factory = s.findPageFactory(route_metadata, MODE_PARSING) - if factory is None: + item = s.findContentFromRoute(route_metadata) + if item is None: assert expected_path is None and expected_metadata is None - return - assert factory.rel_path == slashfix(expected_path) - assert factory.metadata == expected_metadata + else: + assert os.path.relpath(item.spec, s.fs_endpoint_path) == \ + slashfix(expected_path) + assert item.metadata == expected_metadata diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_sources_base.py --- a/tests/test_sources_base.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_sources_base.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,27 +1,25 @@ import os import pytest -from piecrust.app import PieCrust -from piecrust.sources.pageref import PageRef, PageNotFoundError from .mockutil import mock_fs, mock_fs_scope from .pathutil import slashfix @pytest.mark.parametrize('fs_fac, expected_paths, 
expected_slugs', [ - (lambda: mock_fs(), [], []), - (lambda: mock_fs().withPage('test/foo.html'), - ['foo.html'], ['foo']), - (lambda: mock_fs().withPage('test/foo.md'), - ['foo.md'], ['foo']), - (lambda: mock_fs().withPage('test/foo.ext'), - ['foo.ext'], ['foo.ext']), - (lambda: mock_fs().withPage('test/foo/bar.html'), - ['foo/bar.html'], ['foo/bar']), - (lambda: mock_fs().withPage('test/foo/bar.md'), - ['foo/bar.md'], ['foo/bar']), - (lambda: mock_fs().withPage('test/foo/bar.ext'), - ['foo/bar.ext'], ['foo/bar.ext']), - ]) -def test_default_source_factories(fs_fac, expected_paths, expected_slugs): + (lambda: mock_fs(), [], []), + (lambda: mock_fs().withPage('test/foo.html'), + ['foo.html'], ['foo']), + (lambda: mock_fs().withPage('test/foo.md'), + ['foo.md'], ['foo']), + (lambda: mock_fs().withPage('test/foo.ext'), + ['foo.ext'], ['foo.ext']), + (lambda: mock_fs().withPage('test/foo/bar.html'), + ['foo/bar.html'], ['foo/bar']), + (lambda: mock_fs().withPage('test/foo/bar.md'), + ['foo/bar.md'], ['foo/bar']), + (lambda: mock_fs().withPage('test/foo/bar.ext'), + ['foo/bar.ext'], ['foo/bar.ext']), +]) +def test_default_source_items(fs_fac, expected_paths, expected_slugs): fs = fs_fac() fs.withConfig({ 'site': { @@ -29,125 +27,47 @@ 'test': {}}, 'routes': [ {'url': '/%path%', 'source': 'test'}] - } - }) + } + }) fs.withDir('kitchen/test') with mock_fs_scope(fs): - app = PieCrust(fs.path('kitchen'), cache=False) + app = fs.getApp() s = app.getSource('test') - facs = list(s.buildPageFactories()) - paths = [f.rel_path for f in facs] - assert paths == expected_paths - slugs = [f.metadata['slug'] for f in facs] + items = list(s.getAllContents()) + paths = [os.path.relpath(f.spec, s.fs_endpoint_path) for f in items] + assert paths == slashfix(expected_paths) + slugs = [f.metadata['route_params']['slug'] for f in items] assert slugs == expected_slugs @pytest.mark.parametrize( - 'ref_path, expected_path, expected_metadata', - [ - ('foo.html', '/kitchen/test/foo.html', 
{'slug': 'foo'}), - ('foo/bar.html', '/kitchen/test/foo/bar.html', - {'slug': 'foo/bar'}), - ]) -def test_default_source_resolve_ref(ref_path, expected_path, - expected_metadata): - fs = mock_fs() + 'fs_fac, ref_path, expected_path, expected_metadata', [ + (lambda: mock_fs().withPage('test/foo.html'), + 'foo.html', + 'test/foo.html', + {'slug': 'foo'}), + (lambda: mock_fs().withPage('test/foo/bar.html'), + 'foo/bar.html', + 'test/foo/bar.html', + {'slug': 'foo/bar'}), + + ]) +def test_default_source_find_item(fs_fac, ref_path, expected_path, + expected_metadata): + fs = fs_fac() fs.withConfig({ 'site': { 'sources': { 'test': {}}, 'routes': [ {'url': '/%path%', 'source': 'test'}] - } - }) - expected_path = fs.path(expected_path).replace('/', os.sep) - with mock_fs_scope(fs): - app = PieCrust(fs.path('kitchen'), cache=False) - s = app.getSource('test') - actual_path, actual_metadata = s.resolveRef(ref_path) - assert actual_path == expected_path - assert actual_metadata == expected_metadata - - -@pytest.mark.parametrize('page_ref, expected_source_name, expected_rel_path, ' - 'expected_possible_paths', [ - ('foo:one.md', 'foo', 'one.md', - ['foo/one.md']), - ('foo:two.md', 'foo', 'two.md', - ['foo/two.md']), - ('foo:two.html', 'foo', 'two.html', - ['foo/two.html']), - ('foo:two.%ext%', 'foo', 'two.html', - ['foo/two.html', 'foo/two.md', 'foo/two.textile']), - ('foo:subdir/four.md', 'foo', 'subdir/four.md', - ['foo/subdir/four.md']), - ('foo:subdir/four.%ext%', 'foo', 'subdir/four.md', - ['foo/subdir/four.html', 'foo/subdir/four.md', - 'foo/subdir/four.textile']), - ('foo:three.md;bar:three.md', 'foo', 'three.md', - ['foo/three.md', 'bar/three.md']), - ('foo:three.%ext%;bar:three.%ext%', 'foo', 'three.md', - ['foo/three.html', 'foo/three.md', 'foo/three.textile', - 'bar/three.html', 'bar/three.md', 'bar/three.textile']), - ('foo:special.md;bar:special.md', 'bar', 'special.md', - ['foo/special.md', 'bar/special.md']) - ]) -def test_page_ref(page_ref, 
expected_source_name, expected_rel_path, - expected_possible_paths): - fs = (mock_fs() - .withConfig({ - 'site': { - 'sources': { - 'foo': {}, - 'bar': {} - } - } - }) - .withPage('foo/one.md') - .withPage('foo/two.md') - .withPage('foo/two.html') - .withPage('foo/three.md') - .withPage('foo/subdir/four.md') - .withPage('bar/three.md') - .withPage('bar/special.md')) + } + }) with mock_fs_scope(fs): app = fs.getApp() - r = PageRef(app, page_ref) - - assert r.possible_paths == slashfix( - [os.path.join(fs.path('/kitchen'), p) - for p in expected_possible_paths]) - - assert r.exists - assert r.source_name == expected_source_name - assert r.source == app.getSource(expected_source_name) - assert r.rel_path == expected_rel_path - assert r.path == slashfix(fs.path(os.path.join( - 'kitchen', expected_source_name, expected_rel_path))) - - -def test_page_ref_with_missing_source(): - fs = mock_fs().withConfig() - with mock_fs_scope(fs): - app = fs.getApp() - r = PageRef(app, 'whatever:doesnt_exist.md') - with pytest.raises(Exception): - r.possible_ref_specs - - -def test_page_ref_with_missing_file(): - fs = mock_fs().withConfig() - with mock_fs_scope(fs): - app = fs.getApp() - r = PageRef(app, 'pages:doesnt_exist.%ext%') - assert r.possible_ref_specs == [ - 'pages:doesnt_exist.html', 'pages:doesnt_exist.md', - 'pages:doesnt_exist.textile'] - with pytest.raises(PageNotFoundError): - r.source_name - with pytest.raises(PageNotFoundError): - r.rel_path - with pytest.raises(PageNotFoundError): - r.path - assert not r.exists - + s = app.getSource('test') + item = s.findContentFromRoute({'slug': ref_path}) + assert item is not None + assert os.path.relpath(item.spec, app.root_dir) == \ + slashfix(expected_path) + assert item.metadata['route_params'] == expected_metadata diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_sources_posts.py --- a/tests/test_sources_posts.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_sources_posts.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,8 +1,11 @@ 
+import os.path import pytest from .mockutil import mock_fs, mock_fs_scope -@pytest.mark.parametrize('fs_fac, src_type, expected_paths, expected_metadata', [ +@pytest.mark.parametrize( + 'fs_fac, src_type, expected_paths, expected_metadata', + [ (lambda: mock_fs(), 'flat', [], []), (lambda: mock_fs().withPage('test/2014-01-01_foo.md'), 'flat', @@ -18,9 +21,9 @@ 'hierarchy', ['2014/01/01_foo.md'], [(2014, 1, 1, 'foo')]), - ]) -def test_post_source_factories(fs_fac, src_type, expected_paths, - expected_metadata): + ]) +def test_post_source_items(fs_fac, src_type, expected_paths, + expected_metadata): fs = fs_fac() fs.withConfig({ 'site': { @@ -28,18 +31,20 @@ 'test': {'type': 'posts/%s' % src_type}}, 'routes': [ {'url': '/%slug%', 'source': 'test'}] - } - }) + } + }) fs.withDir('kitchen/test') with mock_fs_scope(fs): - app = fs.getApp(cache=False) + app = fs.getApp() s = app.getSource('test') - facs = list(s.buildPageFactories()) - paths = [f.rel_path for f in facs] + items = list(s.getAllContents()) + paths = [os.path.relpath(f.spec, s.fs_endpoint_path) for f in items] assert paths == expected_paths metadata = [ - (f.metadata['year'], f.metadata['month'], - f.metadata['day'], f.metadata['slug']) - for f in facs] + (f.metadata['route_params']['year'], + f.metadata['route_params']['month'], + f.metadata['route_params']['day'], + f.metadata['route_params']['slug']) + for f in items] assert metadata == expected_metadata diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_templating_jinjaengine.py --- a/tests/test_templating_jinjaengine.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_templating_jinjaengine.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,36 +1,33 @@ import pytest -from .mockutil import ( - mock_fs, mock_fs_scope, get_simple_page, render_simple_page) +from .mockutil import mock_fs, mock_fs_scope +from .rdrutil import render_simple_page app_config = { - 'site': { - 'default_format': 'none', - 'default_template_engine': 'jinja'}, - 'foo': 'bar'} + 'site': { + 
'default_format': 'none', + 'default_template_engine': 'jinja'}, + 'foo': 'bar'} page_config = {'layout': 'none'} open_patches = ['jinja2.environment', 'jinja2.utils'] @pytest.mark.parametrize( - 'contents, expected', - [ - ("Raw text", "Raw text"), - ("This is {{foo}}", "This is bar"), - ("Info:\nMy URL: {{page.url}}\n", - "Info:\nMy URL: /foo.html") - ]) + 'contents, expected', + [ + ("Raw text", "Raw text"), + ("This is {{foo}}", "This is bar"), + ("Info:\nMy URL: {{page.url}}\n", + "Info:\nMy URL: /foo.html") + ]) def test_simple(contents, expected): fs = (mock_fs() - .withConfig(app_config) - .withPage('pages/foo', config=page_config, contents=contents)) + .withConfig(app_config) + .withPage('pages/foo', config=page_config, contents=contents)) with mock_fs_scope(fs, open_patches=open_patches): - app = fs.getApp() - page = get_simple_page(app, 'foo.md') - route = app.getSourceRoute('pages', None) - route_metadata = {'slug': 'foo'} - output = render_simple_page(page, route, route_metadata) + page = fs.getSimplePage('foo.md') + output = render_simple_page(page) assert output == expected @@ -39,32 +36,26 @@ layout = "{{content}}\nFor site: {{foo}}\n" expected = "Blah\n\nFor site: bar" fs = (mock_fs() - .withConfig(app_config) - .withAsset('templates/blah.jinja', layout) - .withPage('pages/foo', config={'layout': 'blah'}, - contents=contents)) + .withConfig(app_config) + .withAsset('templates/blah.jinja', layout) + .withPage('pages/foo', config={'layout': 'blah.jinja'}, + contents=contents)) with mock_fs_scope(fs, open_patches=open_patches): - app = fs.getApp() - page = get_simple_page(app, 'foo.md') - route = app.getSourceRoute('pages', None) - route_metadata = {'slug': 'foo'} - output = render_simple_page(page, route, route_metadata) + page = fs.getSimplePage('foo.md') + output = render_simple_page(page) assert output == expected def test_partial(): contents = "Info:\n{% include 'page_info.jinja' %}\n" - partial = "- URL: {{page.url}}\n- SLUG: {{page.slug}}\n" + 
partial = "- URL: {{page.url}}\n- SLUG: {{page.route.slug}}\n" expected = "Info:\n- URL: /foo.html\n- SLUG: foo" fs = (mock_fs() - .withConfig(app_config) - .withAsset('templates/page_info.jinja', partial) - .withPage('pages/foo', config=page_config, contents=contents)) + .withConfig(app_config) + .withAsset('templates/page_info.jinja', partial) + .withPage('pages/foo', config=page_config, contents=contents)) with mock_fs_scope(fs, open_patches=open_patches): - app = fs.getApp() - page = get_simple_page(app, 'foo.md') - route = app.getSourceRoute('pages', None) - route_metadata = {'slug': 'foo'} - output = render_simple_page(page, route, route_metadata) + page = fs.getSimplePage('foo.md') + output = render_simple_page(page) assert output == expected diff -r 7a1903ede496 -r 2e5c5d33d62c tests/test_templating_pystacheengine.py --- a/tests/test_templating_pystacheengine.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/test_templating_pystacheengine.py Tue Nov 21 22:07:12 2017 -0800 @@ -1,36 +1,33 @@ import pytest -from .mockutil import ( - mock_fs, mock_fs_scope, get_simple_page, render_simple_page) +from .mockutil import mock_fs, mock_fs_scope +from .rdrutil import render_simple_page app_config = { - 'site': { - 'default_format': 'none', - 'default_template_engine': 'mustache'}, - 'foo': 'bar'} + 'site': { + 'default_format': 'none', + 'default_template_engine': 'mustache'}, + 'foo': 'bar'} page_config = {'layout': 'none'} open_patches = ['pystache.common'] @pytest.mark.parametrize( - 'contents, expected', - [ - ("Raw text", "Raw text"), - ("This is {{foo}}", "This is bar"), - ("Info:\n{{#page}}\nMy URL: {{url}}\n{{/page}}\n", - "Info:\nMy URL: /foo.html\n") - ]) + 'contents, expected', + [ + ("Raw text", "Raw text"), + ("This is {{foo}}", "This is bar"), + ("Info:\n{{#page}}\nMy URL: {{url}}\n{{/page}}\n", + "Info:\nMy URL: /foo.html\n") + ]) def test_simple(contents, expected): fs = (mock_fs() - .withConfig(app_config) - .withPage('pages/foo', config=page_config, 
contents=contents)) + .withConfig(app_config) + .withPage('pages/foo', config=page_config, contents=contents)) with mock_fs_scope(fs, open_patches=open_patches): - app = fs.getApp() - page = get_simple_page(app, 'foo.md') - route = app.getSourceRoute('pages', None) - route_metadata = {'slug': 'foo'} - output = render_simple_page(page, route, route_metadata) + page = fs.getSimplePage('foo.md') + output = render_simple_page(page) assert output == expected @@ -39,16 +36,13 @@ layout = "{{content}}\nFor site: {{foo}}\n" expected = "Blah\n\nFor site: bar\n" fs = (mock_fs() - .withConfig(app_config) - .withAsset('templates/blah.mustache', layout) - .withPage('pages/foo', config={'layout': 'blah'}, - contents=contents)) + .withConfig(app_config) + .withAsset('templates/blah.mustache', layout) + .withPage('pages/foo', config={'layout': 'blah.mustache'}, + contents=contents)) with mock_fs_scope(fs, open_patches=open_patches): - app = fs.getApp() - page = get_simple_page(app, 'foo.md') - route = app.getSourceRoute('pages', None) - route_metadata = {'slug': 'foo'} - output = render_simple_page(page, route, route_metadata) + page = fs.getSimplePage('foo.md') + output = render_simple_page(page) # On Windows, pystache unexplicably adds `\r` to some newlines... wtf. 
output = output.replace('\r', '') assert output == expected @@ -56,18 +50,15 @@ def test_partial(): contents = "Info:\n{{#page}}\n{{> page_info}}\n{{/page}}\n" - partial = "- URL: {{url}}\n- SLUG: {{slug}}\n" + partial = "- URL: {{url}}\n- SLUG: {{route.slug}}\n" expected = "Info:\n- URL: /foo.html\n- SLUG: foo\n" fs = (mock_fs() - .withConfig(app_config) - .withAsset('templates/page_info.mustache', partial) - .withPage('pages/foo', config=page_config, contents=contents)) + .withConfig(app_config) + .withAsset('templates/page_info.mustache', partial) + .withPage('pages/foo', config=page_config, contents=contents)) with mock_fs_scope(fs, open_patches=open_patches): - app = fs.getApp() - page = get_simple_page(app, 'foo.md') - route = app.getSourceRoute('pages', None) - route_metadata = {'slug': 'foo'} - output = render_simple_page(page, route, route_metadata) + page = fs.getSimplePage('foo.md') + output = render_simple_page(page) # On Windows, pystache unexplicably adds `\r` to some newlines... wtf. 
output = output.replace('\r', '') assert output == expected diff -r 7a1903ede496 -r 2e5c5d33d62c tests/tmpfs.py --- a/tests/tmpfs.py Tue Nov 21 11:00:06 2017 -0800 +++ b/tests/tmpfs.py Tue Nov 21 22:07:12 2017 -0800 @@ -9,16 +9,16 @@ class TempDirFileSystem(TestFileSystemBase): def __init__(self): self._root = os.path.join( - os.path.dirname(__file__), - '__tmpfs__', - '%d' % random.randrange(1000)) + os.path.dirname(__file__), + '__tmpfs__', + '%d' % random.randrange(1000)) self._done = False def path(self, p): p = p.lstrip('/\\') return os.path.join(self._root, p) - def getStructure(self, path=None): + def getStructure(self, path=''): path = self.path(path) if not os.path.exists(path): raise Exception("No such path: %s" % path) @@ -44,8 +44,11 @@ self._getStructureRecursive(e, full_cur, item) target[cur] = e else: - with open(full_cur, 'r', encoding='utf8') as fp: - target[cur] = fp.read() + try: + with open(full_cur, 'r', encoding='utf8') as fp: + target[cur] = fp.read() + except Exception as ex: + target[cur] = "ERROR: CAN'T READ '%s': %s" % (full_cur, ex) def _createDir(self, path): if not os.path.exists(path): @@ -69,7 +72,7 @@ def __init__(self, fs, open_patches=None, keep=False): self._fs = fs self._open = open - self._keep = keep + self._keep = keep or TestFileSystemBase._leave_mockfs @property def root(self):