view piecrust/commands/builtin/baking.py @ 196:154b8df04829

processing: Add Compass and Sass processors. The Sass processor is similar to the Less processor, i.e. it takes part in the structured pipeline processing by using the mapfile produced by the Sass compiler to provide a list of dependencies (see the sketch below). The Compass processor acts completely outside of the pipeline, so the server won't know what's up to date and what's not. The user is expected to run `compass watch` to keep things up to date. However, that requires passing the server's cache directory as the output location, so we'll need to add an easy way for the user to get that path.
author Ludovic Chabant <ludovic@chabant.com>
date Sun, 11 Jan 2015 23:08:49 -0800
parents 5d8351cb32d8
children aaf08277b96d
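
For reference, the mapfile approach mentioned above amounts to something like the following. This is a minimal illustrative sketch, not the processor's actual code; `read_sass_dependencies` is a hypothetical helper name.

import json
import os.path

def read_sass_dependencies(map_path):
    # Source maps are JSON documents; their "sources" array lists the
    # input files that went into the compiled output. Paths are given
    # relative to the map file's location.
    with open(map_path, 'r') as fp:
        sourcemap = json.load(fp)
    base_dir = os.path.dirname(map_path)
    return [os.path.normpath(os.path.join(base_dir, src))
            for src in sourcemap.get('sources', [])]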

import time
import os.path
import logging
import hashlib
import fnmatch
import datetime
from piecrust.baking.baker import Baker
from piecrust.baking.records import BakeRecord
from piecrust.chefutil import format_timed
from piecrust.commands.base import ChefCommand
from piecrust.processing.base import ProcessorPipeline
from piecrust.processing.records import (
        ProcessorPipelineRecord, FLAG_OVERRIDEN)


logger = logging.getLogger(__name__)


class BakeCommand(ChefCommand):
    def __init__(self):
        super(BakeCommand, self).__init__()
        self.name = 'bake'
        self.description = "Bakes your website into static HTML files."

    def setupParser(self, parser, app):
        parser.add_argument(
                '-o', '--output',
                help="The directory to put all the baked HTML files into "
                     "(defaults to `_counter`)")
        parser.add_argument(
                '-f', '--force',
                help="Force re-baking the entire website.",
                action='store_true')
        parser.add_argument(
                '--portable',
                help="Uses relative paths for all URLs.",
                action='store_true')
        parser.add_argument(
                '--no-assets',
                help="Don't process assets (only pages).",
                action='store_true')

    def run(self, ctx):
        if ctx.args.portable:
            # Disable pretty URLs because there's likely not going to be
            # a web server to handle serving default documents.
            ctx.app.config.set('site/pretty_urls', False)

        out_dir = (ctx.args.output or
                   os.path.join(ctx.app.root_dir, '_counter'))

        start_time = time.clock()
        try:
            # Bake the site sources.
            self._bakeSources(ctx, out_dir)

            # Bake the assets.
            if not ctx.args.no_assets:
                self._bakeAssets(ctx, out_dir)

            # All done.
            logger.info('-------------------------')
            logger.info(format_timed(start_time, 'done baking'))
            return 0
        except Exception as ex:
            if ctx.app.debug:
                logger.exception(ex)
            else:
                logger.error(str(ex))
            return 1

    def _bakeSources(self, ctx, out_dir):
        num_workers = ctx.app.config.get('baker/workers') or 4
        baker = Baker(
                ctx.app, out_dir,
                force=ctx.args.force,
                portable=ctx.args.portable,
                no_assets=ctx.args.no_assets,
                num_workers=num_workers)
        baker.bake()

    def _bakeAssets(self, ctx, out_dir):
        mounts = ctx.app.assets_dirs
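        # Pipeline tweaks (skip/force patterns) come from the `baker`
        # section of the site configuration.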
        baker_params = ctx.app.config.get('baker') or {}
        skip_patterns = baker_params.get('skip_patterns')
        force_patterns = baker_params.get('force_patterns')
        num_workers = ctx.app.config.get('baker/workers') or 4
        proc = ProcessorPipeline(
                ctx.app, mounts, out_dir,
                force=ctx.args.force,
                skip_patterns=skip_patterns,
                force_patterns=force_patterns,
                num_workers=num_workers)
        proc.run()


class ShowRecordCommand(ChefCommand):
    def __init__(self):
        super(ShowRecordCommand, self).__init__()
        self.name = 'showrecord'
        self.description = ("Shows the bake record for a given output "
                            "directory.")

    def setupParser(self, parser, app):
        parser.add_argument(
                '-o', '--output',
                help="The output directory for which to show the bake record "
                     "(defaults to `_counter`)",
                nargs='?')
        parser.add_argument(
                '-p', '--path',
                help="A pattern that will be used to filter the relative path "
                     "of entries to show.")

    def run(self, ctx):
        out_dir = ctx.args.output or os.path.join(ctx.app.root_dir, '_counter')
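        # The record's file name is derived from the output path, so each
        # output directory gets its own bake record.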
        record_name = (hashlib.md5(out_dir.encode('utf8')).hexdigest() +
                       '.record')

        pattern = None
        if ctx.args.path:
            pattern = '*%s*' % ctx.args.path.strip('*')

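        # Page bake records are kept in the 'baker' cache; asset
        # processing records are kept in the 'proc' cache (see below).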
        record_cache = ctx.app.cache.getCache('baker')
        if not record_cache.has(record_name):
            raise Exception("No record has been created for this output path. "
                            "Did you bake there yet?")

        record = BakeRecord.load(record_cache.getCachePath(record_name))
        logging.info("Bake record for: %s" % record.out_dir)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.bake_time))
        logging.info("Entries:")
        for entry in record.entries:
            if pattern:
                if not fnmatch.fnmatch(entry.rel_path, pattern):
                    continue
            logging.info(" - ")
            logging.info("   path:      %s" % entry.rel_path)
            logging.info("   spec:      %s:%s" % (entry.source_name,
                                                  entry.rel_path))
            logging.info("   taxonomy:  %s:%s" % (entry.taxonomy_name,
                                                  entry.taxonomy_term))
            logging.info("   config:    %s" % entry.config)
            logging.info("   out URLs:  %s" % entry.out_uris)
            logging.info("   out paths: %s" % [os.path.relpath(p, out_dir)
                                               for p in entry.out_paths])
            logging.info("   used srcs: %s" % entry.used_source_names)
            if entry.errors:
                logging.error("   errors: %s" % entry.errors)

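        # Also show the asset processing record, if one exists for this
        # output directory.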
        record_cache = ctx.app.cache.getCache('proc')
        if not record_cache.has(record_name):
            return

        record = ProcessorPipelineRecord.load(
                record_cache.getCachePath(record_name))
        logging.info("")
        logging.info("Processing record for: %s" % record.out_dir)
        logging.info("Last baked: %s" %
                     datetime.datetime.fromtimestamp(record.process_time))
        logging.info("Entries:")
        for entry in record.entries:
            if pattern:
                if not fnmatch.fnmatch(entry.rel_input, pattern):
                    continue
            flags = ''
            if entry.flags & FLAG_OVERRIDEN:
                flags += 'overridden'
            logger.info(" - ")
            logger.info("   path:      %s" % entry.rel_input)
            logger.info("   out paths: %s" % entry.rel_outputs)
            logger.info("   flags:     %s" % flags)
            logger.info("   proc tree: %s" % format_proc_tree(
                    entry.proc_tree, 14*' '))
            if entry.errors:
                logger.error("   errors: %s" % entry.errors)


def format_proc_tree(tree, margin='', level=0):
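    """Renders a `(name, children)` processor tree as indented text.

    `margin` is prepended to every line after the first so that the
    tree lines up under the log label that precedes it.
    """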
    name, children = tree
    res = '%s%s%s\n' % (margin if level > 0 else '', level * '  ', name)
    if children:
        for c in children:
            res += format_proc_tree(c, margin, level + 1)
    return res