changeset 411:e7b865f8f335

bake: Enable multiprocess baking.

Baking is now done by running a worker per CPU and sending jobs to them.
This changes several things across the codebase:

* Ability to not cache things related to pages other than the 'main' page
  (i.e. the page at the bottom of the execution stack).
* Decouple the baking process from the bake records, so only the main
  process keeps track of (and modifies) the bake record.
* Remove the need for 'batch page getters' and load pages directly from
  the page factories.

There are various smaller changes included here too, such as support for
scoped performance timers that are saved with the bake record and can be
printed out to the console. Yes, I got carried away.

For testing, the in-memory 'mock' file-system doesn't work anymore since
we're spawning processes, so it is replaced by a 'tmpfs' file-system which
is saved in temporary files on disk and deleted after the tests have run.
author Ludovic Chabant <ludovic@chabant.com>
date Fri, 12 Jun 2015 17:09:19 -0700
parents d1a472464e57
children a1567766c83c
files piecrust/app.py piecrust/baking/baker.py piecrust/baking/records.py piecrust/baking/single.py piecrust/baking/worker.py piecrust/chefutil.py piecrust/commands/builtin/baking.py piecrust/data/linker.py piecrust/data/provider.py piecrust/environment.py piecrust/formatting/markdownformatter.py piecrust/page.py piecrust/rendering.py piecrust/serving/server.py piecrust/sources/array.py piecrust/sources/base.py piecrust/sources/mixins.py piecrust/sources/pageref.py piecrust/taxonomies.py tests/basefs.py tests/conftest.py tests/memfs.py tests/mockutil.py tests/test_baking_baker.py tests/test_data_assetor.py tests/test_data_linker.py tests/test_processing_base.py tests/test_sources_autoconfig.py tests/test_sources_base.py tests/test_sources_posts.py tests/tmpfs.py
diffstat 31 files changed, 1652 insertions(+), 1030 deletions(-)
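Note: at a high level, this changeset replaces the old thread-based BakeScheduler with a pool of worker processes fed by a shared job queue. The main process fans jobs out, collects results (and, at the end, per-worker timers) from a results queue, and uses an abort event for shutdown and error propagation. Below is a minimal, self-contained sketch of that pattern, built from the same multiprocessing primitives the new _WorkerPool class wires together; the payload and the worker's "handling" are placeholders, not the changeset's actual job logic.

import multiprocessing
import queue

def worker_func(wid, work_queue, results, abort_event):
    # Drain jobs until the main process signals shutdown.
    while not abort_event.is_set():
        try:
            job = work_queue.get(True, 0.01)
        except queue.Empty:
            continue
        try:
            results.put_nowait((wid, job))  # stand-in for real job handling
        finally:
            work_queue.task_done()

if __name__ == '__main__':
    work_queue = multiprocessing.JoinableQueue()
    results = multiprocessing.Queue()
    abort_event = multiprocessing.Event()

    workers = []
    for i in range(multiprocessing.cpu_count()):
        w = multiprocessing.Process(target=worker_func,
                                    args=(i, work_queue, results, abort_event))
        w.start()
        workers.append(w)

    jobs = ['page1', 'page2', 'page3']
    for job in jobs:
        work_queue.put_nowait(job)
    work_queue.join()       # wait until every job has been task_done()'d

    for _ in range(len(jobs)):
        print(results.get())

    abort_event.set()       # ask the workers to exit their loops
    for w in workers:
        w.join()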
--- a/piecrust/app.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/app.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,5 +1,6 @@
 import re
 import json
+import time
 import os.path
 import codecs
 import hashlib
@@ -413,9 +414,18 @@
             self.env = StandardEnvironment()
         self.env.initialize(self)
 
+        self.env.registerTimer('SiteConfigLoad')
+        self.env.registerTimer('PageLoad')
+        for engine in self.plugin_loader.getTemplateEngines():
+            self.env.registerTimer(engine.__class__.__name__)
+        for fmt in self.plugin_loader.getFormatters():
+            self.env.registerTimer(fmt.__class__.__name__)
+
     @cached_property
     def config(self):
         logger.debug("Creating site configuration...")
+        start_time = time.perf_counter()
+
         paths = []
         if self.theme_dir:
             paths.append(os.path.join(self.theme_dir, THEME_CONFIG_PATH))
@@ -456,6 +466,7 @@
                         sc['realm'] = REALM_THEME
             config.fixups.append(_fixupThemeSources)
 
+        self.env.stepTimer('SiteConfigLoad', time.perf_counter() - start_time)
         return config
 
     @cached_property
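Note: the registerTimer and stepTimer calls above (and the timerScope context manager used later in worker.py) belong to piecrust/environment.py, which this changeset touches but which isn't reproduced in this excerpt. A plausible minimal reading of that API, assuming timers are just a name-to-seconds mapping on the environment, looks like this; it is a sketch, not the actual implementation.

import time
from contextlib import contextmanager

class TimerHost(object):
    # Hypothetical stand-in for the timer-related parts of piecrust's
    # Environment class; the real class carries much more state.
    def __init__(self):
        self._timers = {}

    def registerTimer(self, name):
        self._timers.setdefault(name, 0)

    def stepTimer(self, name, value):
        self._timers[name] += value

    @contextmanager
    def timerScope(self, name):
        # Measure the body of a `with` block and add it to the timer.
        start = time.perf_counter()
        yield
        self.stepTimer(name, time.perf_counter() - start)

env = TimerHost()
env.registerTimer('PageLoad')
with env.timerScope('PageLoad'):
    time.sleep(0.01)
print(env._timers)  # e.g. {'PageLoad': 0.0102...}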
--- a/piecrust/baking/baker.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/baking/baker.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,13 +1,18 @@
+import copy
 import time
 import os.path
+import queue
 import hashlib
 import logging
-import threading
+import multiprocessing
 from piecrust.baking.records import (
-        TransitionalBakeRecord, BakeRecordPageEntry)
-from piecrust.baking.scheduler import BakeScheduler
-from piecrust.baking.single import (BakingError, PageBaker)
-from piecrust.chefutil import format_timed, log_friendly_exception
+        BakeRecordEntry, TransitionalBakeRecord, TaxonomyInfo, FirstRenderInfo)
+from piecrust.baking.worker import (
+        BakeWorkerJob, LoadJobPayload, RenderFirstSubJobPayload,
+        BakeJobPayload,
+        JOB_LOAD, JOB_RENDER_FIRST, JOB_BAKE)
+from piecrust.chefutil import (
+        format_timed_scope, format_timed)
 from piecrust.sources.base import (
         REALM_NAMES, REALM_USER, REALM_THEME)
 
@@ -21,7 +26,8 @@
         self.app = app
         self.out_dir = out_dir
         self.force = force
-        self.num_workers = app.config.get('baker/workers', 4)
+        self.num_workers = app.config.get('baker/workers',
+                                          multiprocessing.cpu_count())
 
         # Remember what taxonomy pages we should skip
         # (we'll bake them repeatedly later with each taxonomy term)
@@ -29,8 +35,8 @@
         logger.debug("Gathering taxonomy page paths:")
         for tax in self.app.taxonomies:
             for src in self.app.sources:
-                path = tax.resolvePagePath(src.name)
-                if path is not None:
+                tax_page_ref = tax.getPageRef(src)
+                for path in tax_page_ref.possible_paths:
                     self.taxonomy_pages.append(path)
                     logger.debug(" - %s" % path)
 
@@ -39,7 +45,7 @@
         logger.debug("  Root URL: %s" % self.app.config.get('site/root'))
 
         # Get into bake mode.
-        start_time = time.clock()
+        start_time = time.perf_counter()
         self.app.config.set('baker/is_baking', True)
         self.app.env.base_asset_url_format = '%uri%'
 
@@ -52,35 +58,59 @@
         record_cache = self.app.cache.getCache('baker')
         record_id = hashlib.md5(self.out_dir.encode('utf8')).hexdigest()
         record_name = record_id + '.record'
+        previous_record_path = None
         if not self.force and record_cache.has(record_name):
-            t = time.clock()
-            record.loadPrevious(record_cache.getCachePath(record_name))
-            logger.debug(format_timed(
-                    t, 'loaded previous bake record',
-                    colored=False))
+            with format_timed_scope(logger, "loaded previous bake record",
+                                    level=logging.DEBUG, colored=False):
+                previous_record_path = record_cache.getCachePath(record_name)
+                record.loadPrevious(previous_record_path)
         record.current.success = True
 
         # Figure out if we need to clean the cache because important things
         # have changed.
         self._handleCacheValidity(record)
 
+        # Pre-create all caches.
+        for cache_name in ['app', 'baker', 'pages', 'renders']:
+            self.app.cache.getCache(cache_name)
+
         # Gather all sources by realm -- we're going to bake each realm
-        # separately so we can handle "overlaying" (i.e. one realm overrides
-        # another realm's pages).
+        # separately so we can handle "overriding" (i.e. one realm overrides
+        # another realm's pages, like the user realm overriding the theme
+        # realm).
         sources_by_realm = {}
         for source in self.app.sources:
             srclist = sources_by_realm.setdefault(source.realm, [])
             srclist.append(source)
 
+        # Create the worker processes.
+        pool = self._createWorkerPool()
+
         # Bake the realms.
         realm_list = [REALM_USER, REALM_THEME]
         for realm in realm_list:
             srclist = sources_by_realm.get(realm)
             if srclist is not None:
-                self._bakeRealm(record, realm, srclist)
+                self._bakeRealm(record, pool, realm, srclist)
 
         # Bake taxonomies.
-        self._bakeTaxonomies(record)
+        self._bakeTaxonomies(record, pool)
+
+        # All done with the workers.
+        self._terminateWorkerPool(pool)
+
+        # Get the timing information from the workers.
+        record.current.timers = {}
+        for _ in range(len(pool.workers)):
+            try:
+                timers = pool.results.get(True, 0.1)
+            except queue.Empty:
+                logger.error("Didn't get timing information from all workers.")
+                break
+
+            for name, val in timers.items():
+                main_val = record.current.timers.setdefault(name, 0)
+                record.current.timers[name] = main_val + val
 
         # Delete files from the output.
         self._handleDeletions(record)
@@ -98,11 +128,11 @@
                 os.rename(record_path, record_path_next)
 
         # Save the bake record.
-        t = time.clock()
-        record.current.bake_time = time.time()
-        record.current.out_dir = self.out_dir
-        record.saveCurrent(record_cache.getCachePath(record_name))
-        logger.debug(format_timed(t, 'saved bake record', colored=False))
+        with format_timed_scope(logger, "saved bake record.",
+                                level=logging.DEBUG, colored=False):
+            record.current.bake_time = time.time()
+            record.current.out_dir = self.out_dir
+            record.saveCurrent(record_cache.getCachePath(record_name))
 
         # All done.
         self.app.config.set('baker/is_baking', False)
@@ -111,7 +141,7 @@
         return record.detach()
 
     def _handleCacheValidity(self, record):
-        start_time = time.clock()
+        start_time = time.perf_counter()
 
         reason = None
         if self.force:
@@ -152,41 +182,138 @@
                     start_time, "cache is assumed valid",
                     colored=False))
 
-    def _bakeRealm(self, record, realm, srclist):
-        # Gather all page factories from the sources and queue them
-        # for the workers to pick up. Just skip taxonomy pages for now.
-        logger.debug("Baking realm %s" % REALM_NAMES[realm])
-        pool, queue, abort = self._createWorkerPool(record, self.num_workers)
+    def _bakeRealm(self, record, pool, realm, srclist):
+        start_time = time.perf_counter()
+        try:
+            all_factories = []
+            for source in srclist:
+                factories = source.getPageFactories()
+                all_factories += [f for f in factories
+                                  if f.path not in self.taxonomy_pages]
+
+            self._loadRealmPages(record, pool, all_factories)
+            self._renderRealmPages(record, pool, all_factories)
+            self._bakeRealmPages(record, pool, all_factories)
+        finally:
+            page_count = len(all_factories)
+            logger.info(format_timed(
+                    start_time,
+                    "baked %d %s pages" %
+                    (page_count, REALM_NAMES[realm].lower())))
+
+    def _loadRealmPages(self, record, pool, factories):
+        with format_timed_scope(logger,
+                                "loaded %d pages" % len(factories),
+                                level=logging.DEBUG, colored=False):
+            for fac in factories:
+                job = BakeWorkerJob(
+                        JOB_LOAD,
+                        LoadJobPayload(fac))
+                pool.queue.put_nowait(job)
 
-        for source in srclist:
-            factories = source.getPageFactories()
+            def _handler(res):
+                # Create the record entry for this page.
+                record_entry = BakeRecordEntry(res.source_name, res.path)
+                record_entry.config = res.config
+                if res.errors:
+                    record_entry.errors += res.errors
+                    record.current.success = False
+                record.addEntry(record_entry)
+
+            self._waitOnWorkerPool(
+                    pool,
+                    expected_result_count=len(factories),
+                    result_handler=_handler)
+
+    def _renderRealmPages(self, record, pool, factories):
+        with format_timed_scope(logger,
+                                "prepared %d pages" % len(factories),
+                                level=logging.DEBUG, colored=False):
+            expected_result_count = 0
             for fac in factories:
-                if fac.path in self.taxonomy_pages:
-                    logger.debug(
-                            "Skipping taxonomy page: %s:%s" %
-                            (source.name, fac.ref_spec))
+                record_entry = record.getCurrentEntry(fac.path)
+                if record_entry.errors:
+                    logger.debug("Ignoring %s because it had previous "
+                                 "errors." % fac.ref_spec)
+                    continue
+
+                # Make sure the source and the route exist for this page,
+                # otherwise we add errors to the record entry and we'll skip
+                # this page for the rest of the bake.
+                source = self.app.getSource(fac.source.name)
+                if source is None:
+                    record_entry.errors.append(
+                            "Can't get source for page: %s" % fac.ref_spec)
+                    logger.error(record_entry.errors[-1])
                     continue
 
-                entry = BakeRecordPageEntry(fac.source.name, fac.rel_path,
-                                            fac.path)
-                record.addEntry(entry)
-
-                route = self.app.getRoute(source.name, fac.metadata,
+                route = self.app.getRoute(fac.source.name, fac.metadata,
                                           skip_taxonomies=True)
                 if route is None:
-                    entry.errors.append(
+                    record_entry.errors.append(
                             "Can't get route for page: %s" % fac.ref_spec)
-                    logger.error(entry.errors[-1])
+                    logger.error(record_entry.errors[-1])
                     continue
 
-                queue.addJob(BakeWorkerJob(fac, route, entry))
+                # All good, queue the job.
+                job = BakeWorkerJob(
+                        JOB_RENDER_FIRST,
+                        RenderFirstSubJobPayload(fac))
+                pool.queue.put_nowait(job)
+                expected_result_count += 1
+
+            def _handler(res):
+                entry = record.getCurrentEntry(res.path)
+
+                entry.first_render_info = FirstRenderInfo()
+                entry.first_render_info.used_assets = res.used_assets
+                entry.first_render_info.used_pagination = \
+                    res.used_pagination
+                entry.first_render_info.pagination_has_more = \
+                    res.pagination_has_more
+
+                if res.errors:
+                    entry.errors += res.errors
+                    record.current.success = False
+
+            self._waitOnWorkerPool(
+                    pool,
+                    expected_result_count=expected_result_count,
+                    result_handler=_handler)
 
-        success = self._waitOnWorkerPool(pool, abort)
-        record.current.success &= success
+    def _bakeRealmPages(self, record, pool, factories):
+        with format_timed_scope(logger,
+                                "baked %d pages" % len(factories),
+                                level=logging.DEBUG, colored=False):
+            expected_result_count = 0
+            for fac in factories:
+                if self._queueBakeJob(record, pool, fac):
+                    expected_result_count += 1
+
+            def _handler(res):
+                entry = record.getCurrentEntry(res.path, res.taxonomy_info)
+                entry.bake_info = res.bake_info
+                if res.errors:
+                    entry.errors += res.errors
+                if entry.has_any_error:
+                    record.current.success = False
 
-    def _bakeTaxonomies(self, record):
-        logger.debug("Baking taxonomies")
+            self._waitOnWorkerPool(
+                    pool,
+                    expected_result_count=expected_result_count,
+                    result_handler=_handler)
 
+    def _bakeTaxonomies(self, record, pool):
+        with format_timed_scope(logger, 'built taxonomy buckets',
+                                level=logging.DEBUG, colored=False):
+            buckets = self._buildTaxonomyBuckets(record)
+
+        start_time = time.perf_counter()
+        page_count = self._bakeTaxonomyBuckets(record, pool, buckets)
+        logger.info(format_timed(start_time,
+                                 "baked %d taxonomy pages." % page_count))
+
+    def _buildTaxonomyBuckets(self, record):
         # Let's see all the taxonomy terms for which we must bake a
         # listing page... first, pre-populate our big map of used terms.
         # For each source name, we have a list of taxonomies, and for each
@@ -250,8 +377,11 @@
             if not tt_info.dirty_terms.isdisjoint(set(terms)):
                 tt_info.dirty_terms.add(terms)
 
+        return buckets
+
+    def _bakeTaxonomyBuckets(self, record, pool, buckets):
         # Start baking those terms.
-        pool, queue, abort = self._createWorkerPool(record, self.num_workers)
+        expected_result_count = 0
         for source_name, source_taxonomies in buckets.items():
             for tax_name, tt_info in source_taxonomies.items():
                 terms = tt_info.dirty_terms
@@ -262,8 +392,8 @@
                         "Baking '%s' for source '%s': %s" %
                         (tax_name, source_name, terms))
                 tax = self.app.getTaxonomy(tax_name)
-                route = self.app.getTaxonomyRoute(tax_name, source_name)
-                tax_page_ref = tax.getPageRef(source_name)
+                source = self.app.getSource(source_name)
+                tax_page_ref = tax.getPageRef(source)
                 if not tax_page_ref.exists:
                     logger.debug(
                             "No taxonomy page found at '%s', skipping." %
@@ -273,19 +403,33 @@
                 logger.debug(
                         "Using taxonomy page: %s:%s" %
                         (tax_page_ref.source_name, tax_page_ref.rel_path))
+                fac = tax_page_ref.getFactory()
+
                 for term in terms:
-                    fac = tax_page_ref.getFactory()
                     logger.debug(
                             "Queuing: %s [%s=%s]" %
                             (fac.ref_spec, tax_name, term))
-                    entry = BakeRecordPageEntry(
-                            fac.source.name, fac.rel_path, fac.path,
-                            (tax_name, term, source_name))
-                    record.addEntry(entry)
-                    queue.addJob(BakeWorkerJob(fac, route, entry))
+                    tax_info = TaxonomyInfo(tax_name, source_name, term)
+
+                    cur_entry = BakeRecordEntry(
+                            fac.source.name, fac.path, tax_info)
+                    record.addEntry(cur_entry)
+
+                    if self._queueBakeJob(record, pool, fac, tax_info):
+                        expected_result_count += 1
 
-        success = self._waitOnWorkerPool(pool, abort)
-        record.current.success &= success
+        def _handler(res):
+            entry = record.getCurrentEntry(res.path, res.taxonomy_info)
+            entry.bake_info = res.bake_info
+            if res.errors:
+                entry.errors += res.errors
+            if entry.has_any_error:
+                record.current.success = False
+
+        self._waitOnWorkerPool(
+                pool,
+                expected_result_count=expected_result_count,
+                result_handler=_handler)
 
         # Now we create bake entries for all the terms that were *not* dirty.
         # This is because otherwise, on the next incremental bake, we wouldn't
@@ -296,16 +440,71 @@
             # current version.
             if (prev_entry and prev_entry.taxonomy_info and
                     not cur_entry):
-                sn = prev_entry.source_name
-                tn, tt, tsn = prev_entry.taxonomy_info
-                tt_info = buckets[tsn][tn]
-                if tt in tt_info.all_terms:
+                ti = prev_entry.taxonomy_info
+                tt_info = buckets[ti.source_name][ti.taxonomy_name]
+                if ti.term in tt_info.all_terms:
                     logger.debug("Creating unbaked entry for taxonomy "
-                                 "term '%s:%s'." % (tn, tt))
+                                 "term '%s:%s'." % (ti.taxonomy_name, ti.term))
                     record.collapseEntry(prev_entry)
                 else:
                     logger.debug("Taxonomy term '%s:%s' isn't used anymore." %
-                                 (tn, tt))
+                                 (ti.taxonomy_name, ti.term))
+
+        return expected_result_count
+
+    def _queueBakeJob(self, record, pool, fac, tax_info=None):
+        # Get the previous (if any) and current entry for this page.
+        pair = record.getPreviousAndCurrentEntries(fac.path, tax_info)
+        assert pair is not None
+        prev_entry, cur_entry = pair
+        assert cur_entry is not None
+
+        # Ignore if there were errors in the previous passes.
+        if cur_entry.errors:
+            logger.debug("Ignoring %s because it had previous "
+                         "errors." % fac.ref_spec)
+            return False
+
+        # Build the route metadata and find the appropriate route.
+        route_metadata = copy.deepcopy(fac.metadata)
+        if tax_info is not None:
+            tax = self.app.getTaxonomy(tax_info.taxonomy_name)
+            route = self.app.getTaxonomyRoute(tax_info.taxonomy_name,
+                                              tax_info.source_name)
+
+            slugified_term = route.slugifyTaxonomyTerm(tax_info.term)
+            route_metadata[tax.term_name] = slugified_term
+        else:
+            route = self.app.getRoute(fac.source.name, route_metadata,
+                                      skip_taxonomies=True)
+        assert route is not None
+
+        # Figure out if this page is overridden by another previously
+        # baked page. This happens, for example, when the user has
+        # made a page that has the same URL as a theme page.
+        page = fac.buildPage()
+        uri = route.getUri(route_metadata, provider=page)
+        override_entry = record.getOverrideEntry(page.path, uri)
+        if override_entry is not None:
+            override_source = self.app.getSource(
+                    override_entry.source_name)
+            if override_source.realm == fac.source.realm:
+                cur_entry.errors.append(
+                        "Page '%s' maps to URL '%s' but is overridden "
+                        "by page '%s'." %
+                        (fac.ref_spec, uri, override_entry.path))
+                logger.error(cur_entry.errors[-1])
+            cur_entry.flags |= BakeRecordEntry.FLAG_OVERRIDEN
+            return False
+
+        job = BakeWorkerJob(
+                JOB_BAKE,
+                BakeJobPayload(fac, route_metadata, prev_entry,
+                               cur_entry.first_render_info,
+                               record.dirty_source_names,
+                               tax_info))
+        pool.queue.put_nowait(job)
+        return True
 
     def _handleDeletions(self, record):
         for path, reason in record.getDeletions():
@@ -318,139 +517,66 @@
                 # by the user.
                 pass
 
-    def _createWorkerPool(self, record, pool_size=4):
-        pool = []
-        queue = BakeScheduler(record)
-        abort = threading.Event()
-        for i in range(pool_size):
+    def _createWorkerPool(self):
+        from piecrust.baking.worker import BakeWorkerContext, worker_func
+
+        pool = _WorkerPool()
+        for i in range(self.num_workers):
             ctx = BakeWorkerContext(
-                    self.app, self.out_dir, self.force,
-                    record, queue, abort)
-            worker = BakeWorker(i, ctx)
-            pool.append(worker)
-        return pool, queue, abort
+                    self.app.root_dir, self.out_dir,
+                    pool.queue, pool.results, pool.abort_event,
+                    force=self.force, debug=self.app.debug)
+            w = multiprocessing.Process(
+                    target=worker_func, args=(i, ctx))
+            w.start()
+            pool.workers.append(w)
+        return pool
+
+    def _terminateWorkerPool(self, pool):
+        pool.abort_event.set()
+        for w in pool.workers:
+            w.join()
 
-    def _waitOnWorkerPool(self, pool, abort):
-        for w in pool:
-            w.start()
+    def _waitOnWorkerPool(self, pool,
+                          expected_result_count=-1, result_handler=None):
+        assert result_handler is None or expected_result_count >= 0
+        abort_with_exception = None
+        try:
+            if result_handler is None:
+                pool.queue.join()
+            else:
+                got_count = 0
+                while got_count < expected_result_count:
+                    try:
+                        res = pool.results.get(True, 10)
+                    except queue.Empty:
+                        logger.error(
+                                "Got %d results, expected %d, and timed out "
+                                "after 10 seconds. A worker might be stuck?" %
+                                (got_count, expected_result_count))
+                        abort_with_exception = Exception("Worker time-out.")
+                        break
 
-        success = True
-        try:
-            for w in pool:
-                w.join()
-                success &= w.success
-        except KeyboardInterrupt:
+                    got_count += 1
+                    result_handler(res)
+        except KeyboardInterrupt as kiex:
             logger.warning("Bake aborted by user... "
                            "waiting for workers to stop.")
-            abort.set()
-            for w in pool:
-                w.join()
-            raise
-
-        if abort.is_set():
-            excs = [w.abort_exception for w in pool
-                    if w.abort_exception is not None]
-            logger.error("Baking was aborted due to %s error(s):" % len(excs))
-            if self.app.debug:
-                for e in excs:
-                    logger.exception(e)
-            else:
-                for e in excs:
-                    log_friendly_exception(logger, e)
-            raise BakingError("Baking was aborted due to errors.")
-
-        return success
-
+            abort_with_exception = kiex
 
-class BakeWorkerContext(object):
-    def __init__(self, app, out_dir, force, record, work_queue,
-                 abort_event):
-        self.app = app
-        self.out_dir = out_dir
-        self.force = force
-        self.record = record
-        self.work_queue = work_queue
-        self.abort_event = abort_event
-
-
-class BakeWorkerJob(object):
-    def __init__(self, factory, route, record_entry):
-        self.factory = factory
-        self.route = route
-        self.record_entry = record_entry
-
-    @property
-    def source(self):
-        return self.factory.source
+        if abort_with_exception:
+            pool.abort_event.set()
+            for w in pool.workers:
+                w.join(2)
+            raise abort_with_exception
 
 
-class BakeWorker(threading.Thread):
-    def __init__(self, wid, ctx):
-        super(BakeWorker, self).__init__(name=('worker%d' % wid))
-        self.wid = wid
-        self.ctx = ctx
-        self.abort_exception = None
-        self.success = True
-        self._page_baker = PageBaker(
-                ctx.app, ctx.out_dir, ctx.force,
-                ctx.record)
-
-    def run(self):
-        while(not self.ctx.abort_event.is_set()):
-            try:
-                job = self.ctx.work_queue.getNextJob(wait_timeout=1)
-                if job is None:
-                    logger.debug(
-                            "[%d] No more work... shutting down." %
-                            self.wid)
-                    break
-                success = self._unsafeRun(job)
-                logger.debug("[%d] Done with page." % self.wid)
-                self.ctx.work_queue.onJobFinished(job)
-                self.success &= success
-            except Exception as ex:
-                self.ctx.abort_event.set()
-                self.abort_exception = ex
-                self.success = False
-                logger.debug("[%d] Critical error, aborting." % self.wid)
-                if self.ctx.app.debug:
-                    logger.exception(ex)
-                break
-
-    def _unsafeRun(self, job):
-        start_time = time.clock()
-
-        entry = job.record_entry
-        try:
-            self._page_baker.bake(job.factory, job.route, entry)
-        except BakingError as ex:
-            logger.debug("Got baking error. Adding it to the record.")
-            while ex:
-                entry.errors.append(str(ex))
-                ex = ex.__cause__
-
-        has_error = False
-        for e in entry.getAllErrors():
-            has_error = True
-            logger.error(e)
-        if has_error:
-            return False
-
-        if entry.was_any_sub_baked:
-            first_sub = entry.subs[0]
-
-            friendly_uri = first_sub.out_uri
-            if friendly_uri == '':
-                friendly_uri = '[main page]'
-
-            friendly_count = ''
-            if entry.num_subs > 1:
-                friendly_count = ' (%d pages)' % entry.num_subs
-            logger.info(format_timed(
-                    start_time, '[%d] %s%s' %
-                    (self.wid, friendly_uri, friendly_count)))
-
-        return True
+class _WorkerPool(object):
+    def __init__(self):
+        self.queue = multiprocessing.JoinableQueue()
+        self.results = multiprocessing.Queue()
+        self.abort_event = multiprocessing.Event()
+        self.workers = []
 
 
 class _TaxonomyTermsInfo(object):
@@ -463,3 +589,4 @@
 
     def __repr__(self):
         return 'dirty:%s, all:%s' % (self.dirty_terms, self.all_terms)
+
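Note: the queue/collect pattern in _loadRealmPages, _renderRealmPages and _bakeRealmPages above is the same each time: enqueue a batch of jobs, count how many were actually accepted, then pull exactly that many results off the shared results queue and feed each one to a handler closure. A condensed sketch of that fan-out/fan-in step, with job payloads reduced to plain integers for illustration:

import multiprocessing
import queue

def _square_worker(jobs, results):
    # Illustrative worker; real jobs would be BakeWorkerJob instances.
    while True:
        n = jobs.get()
        results.put_nowait(n * n)
        jobs.task_done()

def fan_out_fan_in(jobs, results, items, result_handler):
    # Fan out: queue the jobs, remembering how many we expect back.
    expected_result_count = 0
    for item in items:
        jobs.put_nowait(item)
        expected_result_count += 1

    # Fan in: collect exactly that many results, with a timeout so a
    # stuck worker becomes an error instead of a hang.
    got_count = 0
    while got_count < expected_result_count:
        try:
            res = results.get(True, 10)
        except queue.Empty:
            raise Exception("Worker time-out.")
        got_count += 1
        result_handler(res)

if __name__ == '__main__':
    jobs = multiprocessing.JoinableQueue()
    results = multiprocessing.Queue()
    w = multiprocessing.Process(target=_square_worker,
                                args=(jobs, results), daemon=True)
    w.start()
    fan_out_fan_in(jobs, results, [1, 2, 3], print)  # prints 1, 4, 9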
--- a/piecrust/baking/records.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/baking/records.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,5 +1,6 @@
 import copy
 import os.path
+import hashlib
 import logging
 from piecrust.records import Record, TransitionalRecord
 
@@ -7,35 +8,36 @@
 logger = logging.getLogger(__name__)
 
 
-def _get_transition_key(source_name, rel_path, taxonomy_info=None):
-    key = '%s:%s' % (source_name, rel_path)
+def _get_transition_key(path, taxonomy_info=None):
+    key = path
     if taxonomy_info:
-        taxonomy_name, taxonomy_term, taxonomy_source_name = taxonomy_info
-        key += ';%s:%s=' % (taxonomy_source_name, taxonomy_name)
-        if isinstance(taxonomy_term, tuple):
-            key += '/'.join(taxonomy_term)
+        key += '+%s:%s=' % (taxonomy_info.source_name,
+                            taxonomy_info.taxonomy_name)
+        if isinstance(taxonomy_info.term, tuple):
+            key += '/'.join(taxonomy_info.term)
         else:
-            key += taxonomy_term
-    return key
+            key += taxonomy_info.term
+    return hashlib.md5(key.encode('utf8')).hexdigest()
 
 
 class BakeRecord(Record):
-    RECORD_VERSION = 12
+    RECORD_VERSION = 14
 
     def __init__(self):
         super(BakeRecord, self).__init__()
         self.out_dir = None
         self.bake_time = None
+        self.timers = None
         self.success = True
 
 
-class BakeRecordPassInfo(object):
+class BakePassInfo(object):
     def __init__(self):
         self.used_source_names = set()
         self.used_taxonomy_terms = set()
 
 
-class BakeRecordSubPageEntry(object):
+class SubPageBakeInfo(object):
     FLAG_NONE = 0
     FLAG_BAKED = 2**0
     FLAG_FORCED_BY_SOURCE = 2**1
@@ -68,7 +70,27 @@
                 other.render_passes[p] = copy.deepcopy(pinfo)
 
 
-class BakeRecordPageEntry(object):
+class PageBakeInfo(object):
+    def __init__(self):
+        self.subs = []
+        self.assets = []
+
+
+class FirstRenderInfo(object):
+    def __init__(self):
+        self.assets = []
+        self.used_pagination = False
+        self.pagination_has_more = False
+
+
+class TaxonomyInfo(object):
+    def __init__(self, taxonomy_name, source_name, term):
+        self.taxonomy_name = taxonomy_name
+        self.source_name = source_name
+        self.term = term
+
+
+class BakeRecordEntry(object):
     """ An entry in the bake record.
 
         The `taxonomy_info` attribute should be a tuple of the form:
@@ -79,16 +101,15 @@
     FLAG_SOURCE_MODIFIED = 2**1
     FLAG_OVERRIDEN = 2**2
 
-    def __init__(self, source_name, rel_path, path, taxonomy_info=None):
+    def __init__(self, source_name, path, taxonomy_info=None):
         self.source_name = source_name
-        self.rel_path = rel_path
         self.path = path
         self.taxonomy_info = taxonomy_info
         self.flags = self.FLAG_NONE
         self.config = None
-        self.subs = []
-        self.assets = []
         self.errors = []
+        self.bake_info = None
+        self.first_render_info = None
 
     @property
     def path_mtime(self):
@@ -100,35 +121,59 @@
 
     @property
     def num_subs(self):
-        return len(self.subs)
+        if self.bake_info is None:
+            return 0
+        return len(self.bake_info.subs)
 
     @property
     def was_any_sub_baked(self):
-        for o in self.subs:
-            if o.was_baked:
-                return True
+        if self.bake_info is not None:
+            for o in self.bake_info.subs:
+                if o.was_baked:
+                    return True
+        return False
+
+    @property
+    def subs(self):
+        if self.bake_info is not None:
+            return self.bake_info.subs
+        return []
+
+    @property
+    def has_any_error(self):
+        if len(self.errors) > 0:
+            return True
+        if self.bake_info is not None:
+            for o in self.bake_info.subs:
+                if len(o.errors) > 0:
+                    return True
         return False
 
     def getSub(self, sub_index):
-        return self.subs[sub_index - 1]
+        if self.bake_info is None:
+            raise Exception("No bake info available on this entry.")
+        return self.bake_info.subs[sub_index - 1]
 
     def getAllErrors(self):
         yield from self.errors
-        for o in self.subs:
-            yield from o.errors
+        if self.bake_info is not None:
+            for o in self.bake_info.subs:
+                yield from o.errors
 
     def getAllUsedSourceNames(self):
         res = set()
-        for o in self.subs:
-            for p, pinfo in o.render_passes.items():
-                res |= pinfo.used_source_names
+        if self.bake_info is not None:
+            for o in self.bake_info.subs:
+                for p, pinfo in o.render_passes.items():
+                    res |= pinfo.used_source_names
         return res
 
     def getAllUsedTaxonomyTerms(self):
         res = set()
-        for o in self.subs:
-            for p, pinfo in o.render_passes.items():
-                res |= pinfo.used_taxonomy_terms
+        if self.bake_info is not None:
+            for o in self.bake_info.subs:
+                for p, pinfo in o.render_passes.items():
+                    res |= pinfo.used_taxonomy_terms
         return res
 
 
@@ -141,37 +186,44 @@
     def addEntry(self, entry):
         if (self.previous.bake_time and
                 entry.path_mtime >= self.previous.bake_time):
-            entry.flags |= BakeRecordPageEntry.FLAG_SOURCE_MODIFIED
+            entry.flags |= BakeRecordEntry.FLAG_SOURCE_MODIFIED
             self.dirty_source_names.add(entry.source_name)
         super(TransitionalBakeRecord, self).addEntry(entry)
 
     def getTransitionKey(self, entry):
-        return _get_transition_key(entry.source_name, entry.rel_path,
-                                   entry.taxonomy_info)
+        return _get_transition_key(entry.path, entry.taxonomy_info)
 
-    def getOverrideEntry(self, factory, uri):
+    def getPreviousAndCurrentEntries(self, path, taxonomy_info=None):
+        key = _get_transition_key(path, taxonomy_info)
+        pair = self.transitions.get(key)
+        return pair
+
+    def getOverrideEntry(self, path, uri):
         for pair in self.transitions.values():
             cur = pair[1]
-            if (cur and
-                    (cur.source_name != factory.source.name or
-                        cur.rel_path != factory.rel_path)):
-                    for o in cur.subs:
-                        if o.out_uri == uri:
-                            return cur
+            if cur and cur.path != path:
+                for o in cur.subs:
+                    if o.out_uri == uri:
+                        return cur
         return None
 
-    def getPreviousEntry(self, source_name, rel_path, taxonomy_info=None):
-        key = _get_transition_key(source_name, rel_path, taxonomy_info)
-        pair = self.transitions.get(key)
+    def getPreviousEntry(self, path, taxonomy_info=None):
+        pair = self.getPreviousAndCurrentEntries(path, taxonomy_info)
         if pair is not None:
             return pair[0]
         return None
 
+    def getCurrentEntry(self, path, taxonomy_info=None):
+        pair = self.getPreviousAndCurrentEntries(path, taxonomy_info)
+        if pair is not None:
+            return pair[1]
+        return None
+
     def collapseEntry(self, prev_entry):
         cur_entry = copy.deepcopy(prev_entry)
-        cur_entry.flags = BakeRecordPageEntry.FLAG_NONE
+        cur_entry.flags = BakeRecordEntry.FLAG_NONE
         for o in cur_entry.subs:
-            o.flags = BakeRecordSubPageEntry.FLAG_NONE
+            o.flags = SubPageBakeInfo.FLAG_NONE
         self.addEntry(cur_entry)
 
     def getDeletions(self):
@@ -187,5 +239,5 @@
                     yield (p, 'source file changed outputs')
 
     def _onNewEntryAdded(self, entry):
-        entry.flags |= BakeRecordPageEntry.FLAG_NEW
+        entry.flags |= BakeRecordEntry.FLAG_NEW
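Note: record entries are now keyed by absolute page path (plus optional taxonomy info) rather than by source/rel_path pairs, and the key is hashed so it stays a compact, uniform dict key. A small usage example of the _get_transition_key function added above; the paths are illustrative.

import hashlib

class TaxonomyInfo(object):
    # Same shape as the TaxonomyInfo class added in records.py above.
    def __init__(self, taxonomy_name, source_name, term):
        self.taxonomy_name = taxonomy_name
        self.source_name = source_name
        self.term = term

def _get_transition_key(path, taxonomy_info=None):
    # Same logic as the version added in records.py above.
    key = path
    if taxonomy_info:
        key += '+%s:%s=' % (taxonomy_info.source_name,
                            taxonomy_info.taxonomy_name)
        if isinstance(taxonomy_info.term, tuple):
            key += '/'.join(taxonomy_info.term)
        else:
            key += taxonomy_info.term
    return hashlib.md5(key.encode('utf8')).hexdigest()

ti = TaxonomyInfo('tags', 'posts', 'piecrust')
print(_get_transition_key('/site/pages/foo.md'))       # plain page
print(_get_transition_key('/site/pages/_tag.md', ti))  # taxonomy listing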
 
--- a/piecrust/baking/single.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/baking/single.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,21 +1,13 @@
 import os.path
-import copy
 import shutil
 import codecs
 import logging
 import urllib.parse
 from piecrust.baking.records import (
-        BakeRecordPassInfo, BakeRecordPageEntry, BakeRecordSubPageEntry)
-from piecrust.data.filters import (
-        PaginationFilter, HasFilterClause,
-        IsFilterClause, AndBooleanClause,
-        page_value_accessor)
+        PageBakeInfo, SubPageBakeInfo, BakePassInfo)
 from piecrust.rendering import (
         QualifiedPage, PageRenderingContext, render_page,
-        PASS_FORMATTING, PASS_RENDERING)
-from piecrust.sources.base import (
-        PageFactory,
-        REALM_NAMES, REALM_USER, REALM_THEME)
+        PASS_FORMATTING)
 from piecrust.uriutil import split_uri
 
 
@@ -35,12 +27,10 @@
 
 
 class PageBaker(object):
-    def __init__(self, app, out_dir, force=False, record=None,
-                 copy_assets=True):
+    def __init__(self, app, out_dir, force=False, copy_assets=True):
         self.app = app
         self.out_dir = out_dir
         self.force = force
-        self.record = record
         self.copy_assets = copy_assets
         self.site_root = app.config.get('site/root')
         self.pretty_urls = app.config.get('site/pretty_urls')
@@ -60,122 +50,43 @@
 
         return os.path.normpath(os.path.join(*bake_path))
 
-    def bake(self, factory, route, record_entry):
+    def bake(self, factory, route, route_metadata, prev_entry,
+             first_render_info, dirty_source_names, tax_info=None):
         # Get the page.
         page = factory.buildPage()
-        route_metadata = copy.deepcopy(factory.metadata)
-
-        # Add taxonomy info in the template data and route metadata if needed.
-        bake_taxonomy_info = None
-        if record_entry.taxonomy_info:
-            tax_name, tax_term, tax_source_name = record_entry.taxonomy_info
-            taxonomy = self.app.getTaxonomy(tax_name)
-            slugified_term = route.slugifyTaxonomyTerm(tax_term)
-            route_metadata[taxonomy.term_name] = slugified_term
-            bake_taxonomy_info = (taxonomy, tax_term)
-
-        # Generate the URI.
-        uri = route.getUri(route_metadata, provider=page)
-
-        # See if this URL has been overriden by a previously baked page.
-        # If that page is from another realm (e.g. a user page vs. a theme
-        # page), we silently skip this page. If they're from the same realm,
-        # we don't allow overriding and raise an error (this is probably
-        # because of a misconfigured configuration that allows for ambiguous
-        # URLs between 2 routes or sources).
-        override = self.record.getOverrideEntry(factory, uri)
-        if override is not None:
-            override_source = self.app.getSource(override.source_name)
-            if override_source.realm == factory.source.realm:
-                raise BakingError(
-                        "Page '%s' maps to URL '%s' but is overriden by page"
-                        "'%s:%s'." % (factory.ref_spec, uri,
-                                      override.source_name,
-                                      override.rel_path))
-            logger.debug("'%s' [%s] is overriden by '%s:%s'. Skipping" %
-                         (factory.ref_spec, uri, override.source_name,
-                          override.rel_path))
-            record_entry.flags |= BakeRecordPageEntry.FLAG_OVERRIDEN
-            return
-
-        # Setup the record entry.
-        record_entry.config = copy_public_page_config(page.config)
 
         # Start baking the sub-pages.
         cur_sub = 1
         has_more_subs = True
-        force_this = self.force
-        invalidate_formatting = False
-        prev_record_entry = self.record.getPreviousEntry(
-                factory.source.name, factory.rel_path,
-                record_entry.taxonomy_info)
-
-        logger.debug("Baking '%s'..." % uri)
+        report = PageBakeInfo()
 
         while has_more_subs:
             # Get the URL and path for this sub-page.
             sub_uri = route.getUri(route_metadata, sub_num=cur_sub,
                                    provider=page)
+            logger.debug("Baking '%s' [%d]..." % (sub_uri, cur_sub))
             out_path = self.getOutputPath(sub_uri)
 
             # Create the sub-entry for the bake record.
-            record_sub_entry = BakeRecordSubPageEntry(sub_uri, out_path)
-            record_entry.subs.append(record_sub_entry)
+            sub_entry = SubPageBakeInfo(sub_uri, out_path)
+            report.subs.append(sub_entry)
 
             # Find a corresponding sub-entry in the previous bake record.
-            prev_record_sub_entry = None
-            if prev_record_entry:
+            prev_sub_entry = None
+            if prev_entry:
                 try:
-                    prev_record_sub_entry = prev_record_entry.getSub(cur_sub)
+                    prev_sub_entry = prev_entry.getSub(cur_sub)
                 except IndexError:
                     pass
 
-            # Figure out what to do with this page.
-            if (prev_record_sub_entry and
-                    (prev_record_sub_entry.was_baked_successfully or
-                        prev_record_sub_entry.was_clean)):
-                # If the current page is known to use pages from other sources,
-                # see if any of those got baked, or are going to be baked for
-                # some reason. If so, we need to bake this one too.
-                # (this happens for instance with the main page of a blog).
-                dirty_src_names, invalidated_render_passes = (
-                        self._getDirtySourceNamesAndRenderPasses(
-                            prev_record_sub_entry))
-                if len(invalidated_render_passes) > 0:
-                    logger.debug(
-                            "'%s' is known to use sources %s, which have "
-                            "items that got (re)baked. Will force bake this "
-                            "page. " % (uri, dirty_src_names))
-                    record_sub_entry.flags |= \
-                        BakeRecordSubPageEntry.FLAG_FORCED_BY_SOURCE
-                    force_this = True
-
-                    if PASS_FORMATTING in invalidated_render_passes:
-                        logger.debug(
-                                "Will invalidate cached formatting for '%s' "
-                                "since sources were using during that pass."
-                                % uri)
-                        invalidate_formatting = True
-            elif (prev_record_sub_entry and
-                    prev_record_sub_entry.errors):
-                # Previous bake failed. We'll have to bake it again.
-                logger.debug(
-                        "Previous record entry indicates baking failed for "
-                        "'%s'. Will bake it again." % uri)
-                record_sub_entry.flags |= \
-                    BakeRecordSubPageEntry.FLAG_FORCED_BY_PREVIOUS_ERRORS
-                force_this = True
-            elif not prev_record_sub_entry:
-                # No previous record. We'll have to bake it.
-                logger.debug("No previous record entry found for '%s'. Will "
-                             "force bake it." % uri)
-                record_sub_entry.flags |= \
-                    BakeRecordSubPageEntry.FLAG_FORCED_BY_NO_PREVIOUS
-                force_this = True
+            # Figure out if we need to invalidate or force anything.
+            force_this_sub, invalidate_formatting = _compute_force_flags(
+                    prev_sub_entry, sub_entry, dirty_source_names)
+            force_this_sub = force_this_sub or self.force
 
             # Check for up-to-date outputs.
             do_bake = True
-            if not force_this:
+            if not force_this_sub:
                 try:
                     in_path_time = page.path_mtime
                     out_path_time = os.path.getmtime(out_path)
@@ -188,10 +99,10 @@
             # If this page didn't bake because it's already up-to-date.
             # Keep trying for as many subs as we know this page has.
             if not do_bake:
-                prev_record_sub_entry.collapseRenderPasses(record_sub_entry)
-                record_sub_entry.flags = BakeRecordSubPageEntry.FLAG_NONE
+                prev_sub_entry.collapseRenderPasses(sub_entry)
+                sub_entry.flags = SubPageBakeInfo.FLAG_NONE
 
-                if prev_record_entry.num_subs >= cur_sub + 1:
+                if prev_entry.num_subs >= cur_sub + 1:
                     cur_sub += 1
                     has_more_subs = True
                     logger.debug("  %s is up to date, skipping to next "
@@ -207,34 +118,40 @@
                     cache_key = sub_uri
                     self.app.env.rendered_segments_repository.invalidate(
                             cache_key)
-                    record_sub_entry.flags |= \
-                        BakeRecordSubPageEntry.FLAG_FORMATTING_INVALIDATED
+                    sub_entry.flags |= \
+                        SubPageBakeInfo.FLAG_FORMATTING_INVALIDATED
 
                 logger.debug("  p%d -> %s" % (cur_sub, out_path))
                 qp = QualifiedPage(page, route, route_metadata)
-                ctx, rp = self._bakeSingle(qp, cur_sub, out_path,
-                                           bake_taxonomy_info)
+                ctx, rp = self._bakeSingle(qp, cur_sub, out_path, tax_info)
             except Exception as ex:
                 if self.app.debug:
                     logger.exception(ex)
                 page_rel_path = os.path.relpath(page.path, self.app.root_dir)
                 raise BakingError("%s: error baking '%s'." %
-                                  (page_rel_path, uri)) from ex
+                                  (page_rel_path, sub_uri)) from ex
 
             # Record what we did.
-            record_sub_entry.flags |= BakeRecordSubPageEntry.FLAG_BAKED
-            self.record.dirty_source_names.add(record_entry.source_name)
+            sub_entry.flags |= SubPageBakeInfo.FLAG_BAKED
+            # self.record.dirty_source_names.add(record_entry.source_name)
             for p, pinfo in ctx.render_passes.items():
-                brpi = BakeRecordPassInfo()
-                brpi.used_source_names = set(pinfo.used_source_names)
-                brpi.used_taxonomy_terms = set(pinfo.used_taxonomy_terms)
-                record_sub_entry.render_passes[p] = brpi
-            if prev_record_sub_entry:
-                prev_record_sub_entry.collapseRenderPasses(record_sub_entry)
+                bpi = BakePassInfo()
+                bpi.used_source_names = set(pinfo.used_source_names)
+                bpi.used_taxonomy_terms = set(pinfo.used_taxonomy_terms)
+                sub_entry.render_passes[p] = bpi
+            if prev_sub_entry:
+                prev_sub_entry.collapseRenderPasses(sub_entry)
+
+            # If this page has had its first sub-page rendered already, we
+            # have that information from the baker. Otherwise (e.g. for
+            # taxonomy pages), we have that information from the result
+            # of the render.
+            info = ctx
+            if cur_sub == 1 and first_render_info is not None:
+                info = first_render_info
 
             # Copy page assets.
-            if (cur_sub == 1 and self.copy_assets and
-                    ctx.used_assets is not None):
+            if cur_sub == 1 and self.copy_assets and info.used_assets:
                 if self.pretty_urls:
                     out_assets_dir = os.path.dirname(out_path)
                 else:
@@ -244,47 +161,113 @@
                         out_assets_dir += out_name_noext
 
                 logger.debug("Copying page assets to: %s" % out_assets_dir)
-                if not os.path.isdir(out_assets_dir):
-                    os.makedirs(out_assets_dir, 0o755)
-                for ap in ctx.used_assets:
+                _ensure_dir_exists(out_assets_dir)
+
+                used_assets = info.used_assets
+                for ap in used_assets:
                     dest_ap = os.path.join(out_assets_dir,
                                            os.path.basename(ap))
                     logger.debug("  %s -> %s" % (ap, dest_ap))
                     shutil.copy(ap, dest_ap)
-                    record_entry.assets.append(ap)
+                    report.assets.append(ap)
 
             # Figure out if we have more work.
             has_more_subs = False
-            if ctx.used_pagination is not None:
-                if ctx.used_pagination.has_more:
-                    cur_sub += 1
-                    has_more_subs = True
+            if info.pagination_has_more:
+                cur_sub += 1
+                has_more_subs = True
+
+        return report
 
-    def _bakeSingle(self, qualified_page, num, out_path, taxonomy_info=None):
+    def _bakeSingle(self, qualified_page, num, out_path, tax_info=None):
         ctx = PageRenderingContext(qualified_page, page_num=num)
-        if taxonomy_info:
-            ctx.setTaxonomyFilter(taxonomy_info[0], taxonomy_info[1])
+        if tax_info:
+            tax = self.app.getTaxonomy(tax_info.taxonomy_name)
+            ctx.setTaxonomyFilter(tax, tax_info.term)
 
         rp = render_page(ctx)
 
         out_dir = os.path.dirname(out_path)
-        if not os.path.isdir(out_dir):
-            os.makedirs(out_dir, 0o755)
+        _ensure_dir_exists(out_dir)
 
         with codecs.open(out_path, 'w', 'utf8') as fp:
             fp.write(rp.content)
 
         return ctx, rp
 
-    def _getDirtySourceNamesAndRenderPasses(self, record_sub_entry):
-        dirty_src_names = set()
-        invalidated_render_passes = set()
-        for p, pinfo in record_sub_entry.render_passes.items():
-            for src_name in pinfo.used_source_names:
-                is_dirty = (src_name in self.record.dirty_source_names)
-                if is_dirty:
-                    invalidated_render_passes.add(p)
-                    dirty_src_names.add(src_name)
-                    break
-        return dirty_src_names, invalidated_render_passes
+
+def _compute_force_flags(prev_sub_entry, sub_entry, dirty_source_names):
+    # Figure out what to do with this page.
+    force_this_sub = False
+    invalidate_formatting = False
+    sub_uri = sub_entry.out_uri
+    if (prev_sub_entry and
+            (prev_sub_entry.was_baked_successfully or
+                prev_sub_entry.was_clean)):
+        # If the current page is known to use pages from other sources,
+        # see if any of those got baked, or are going to be baked for
+        # some reason. If so, we need to bake this one too.
+        # (this happens for instance with the main page of a blog).
+        dirty_for_this, invalidated_render_passes = (
+                _get_dirty_source_names_and_render_passes(
+                    prev_sub_entry, dirty_source_names))
+        if len(invalidated_render_passes) > 0:
+            logger.debug(
+                    "'%s' is known to use sources %s, which have "
+                    "items that got (re)baked. Will force bake this "
+                    "page. " % (sub_uri, dirty_for_this))
+            sub_entry.flags |= \
+                SubPageBakeInfo.FLAG_FORCED_BY_SOURCE
+            force_this_sub = True
 
+            if PASS_FORMATTING in invalidated_render_passes:
+                logger.debug(
+                        "Will invalidate cached formatting for '%s' "
+                        "since sources were used during that pass."
+                        % sub_uri)
+                invalidate_formatting = True
+    elif (prev_sub_entry and
+            prev_sub_entry.errors):
+        # Previous bake failed. We'll have to bake it again.
+        logger.debug(
+                "Previous record entry indicates baking failed for "
+                "'%s'. Will bake it again." % sub_uri)
+        sub_entry.flags |= \
+            SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS
+        force_this_sub = True
+    elif not prev_sub_entry:
+        # No previous record. We'll have to bake it.
+        logger.debug("No previous record entry found for '%s'. Will "
+                     "force bake it." % sub_uri)
+        sub_entry.flags |= \
+            SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS
+        force_this_sub = True
+
+    return force_this_sub, invalidate_formatting
+
+
+def _get_dirty_source_names_and_render_passes(
+        sub_entry, dirty_source_names):
+    dirty_for_this = set()
+    invalidated_render_passes = set()
+    for p, pinfo in sub_entry.render_passes.items():
+        for src_name in pinfo.used_source_names:
+            is_dirty = (src_name in dirty_source_names)
+            if is_dirty:
+                invalidated_render_passes.add(p)
+                dirty_for_this.add(src_name)
+                break
+    return dirty_for_this, invalidated_render_passes
+
+
+def _ensure_dir_exists(path):
+    try:
+        os.makedirs(path, mode=0o755, exist_ok=True)
+    except OSError:
+        # In a multiprocess environment, several processes may very
+        # occasionally try to create the same directory at the same time.
+        # Let's ignore any error and if something's really wrong (like file
+        # access permissions or whatever), then it will more legitimately fail
+        # just after this when we try to write files.
+        pass
+
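Note: _ensure_dir_exists above exists specifically because of multiprocess baking: several workers can race to create the same output directory. With exist_ok=True the common race is already harmless, and the except OSError is a belt-and-braces guard that defers any real failure (e.g. permissions) to the subsequent file write, where the error message is clearer. A runnable illustration of the race it guards against; the directory and file names are illustrative.

import os
import tempfile
import multiprocessing

def _ensure_dir_exists(path):
    # Same helper as in single.py above.
    try:
        os.makedirs(path, mode=0o755, exist_ok=True)
    except OSError:
        # If something is really wrong (e.g. permissions), the write
        # below will fail with a clearer error anyway.
        pass

def _write_output(out_dir, name):
    _ensure_dir_exists(out_dir)
    with open(os.path.join(out_dir, name), 'w') as fp:
        fp.write('baked\n')

if __name__ == '__main__':
    out_dir = os.path.join(tempfile.mkdtemp(), 'posts')
    # Several workers race to create the same output directory.
    procs = [multiprocessing.Process(target=_write_output,
                                     args=(out_dir, 'p%d.html' % i))
             for i in range(4)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print(sorted(os.listdir(out_dir)))  # ['p0.html', ..., 'p3.html']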
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/piecrust/baking/worker.py	Fri Jun 12 17:09:19 2015 -0700
@@ -0,0 +1,250 @@
+import time
+import copy
+import queue
+import logging
+from piecrust.app import PieCrust
+from piecrust.baking.single import PageBaker, BakingError
+from piecrust.rendering import (
+        QualifiedPage, PageRenderingContext, render_page_segments)
+from piecrust.sources.base import PageFactory
+
+
+logger = logging.getLogger(__name__)
+
+
+def worker_func(wid, ctx):
+    logger.debug("Worker %d booting up..." % wid)
+    w = BakeWorker(wid, ctx)
+    w.run()
+
+
+class BakeWorkerContext(object):
+    def __init__(self, root_dir, out_dir,
+                 work_queue, results, abort_event,
+                 force=False, debug=False):
+        self.root_dir = root_dir
+        self.out_dir = out_dir
+        self.work_queue = work_queue
+        self.results = results
+        self.abort_event = abort_event
+        self.force = force
+        self.debug = debug
+
+
+JOB_LOAD, JOB_RENDER_FIRST, JOB_BAKE = range(0, 3)
+
+
+class BakeWorkerJob(object):
+    def __init__(self, job_type, payload):
+        self.job_type = job_type
+        self.payload = payload
+
+
+class BakeWorker(object):
+    def __init__(self, wid, ctx):
+        self.wid = wid
+        self.ctx = ctx
+
+    def run(self):
+        logger.debug("Working %d initializing..." % self.wid)
+        work_start_time = time.perf_counter()
+
+        # Create the app local to this worker.
+        app = PieCrust(self.ctx.root_dir, debug=self.ctx.debug)
+        app.env.fs_cache_only_for_main_page = True
+        app.env.registerTimer("Worker_%d" % self.wid)
+        app.env.registerTimer("JobReceive")
+
+        # Create the job handlers.
+        job_handlers = {
+                JOB_LOAD: LoadJobHandler(app, self.ctx),
+                JOB_RENDER_FIRST: RenderFirstSubJobHandler(app, self.ctx),
+                JOB_BAKE: BakeJobHandler(app, self.ctx)}
+        for jh in job_handlers.values():
+            app.env.registerTimer(type(jh).__name__)
+
+        # Start working!
+        while not self.ctx.abort_event.is_set():
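+            # Poll the queue with a short timeout so the abort event is
+            # checked regularly.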
+            try:
+                with app.env.timerScope('JobReceive'):
+                    job = self.ctx.work_queue.get(True, 0.01)
+            except queue.Empty:
+                continue
+
+            try:
+                handler = job_handlers[job.job_type]
+                with app.env.timerScope(type(handler).__name__):
+                    handler.handleJob(job)
+            except Exception as ex:
+                self.ctx.abort_event.set()
+                self.abort_exception = ex
+                self.success = False
+                logger.debug("[%d] Critical error, aborting." % self.wid)
+                if self.ctx.debug:
+                    logger.exception(ex)
+                break
+            finally:
+                self.ctx.work_queue.task_done()
+
+        # Send our timers to the main process before exiting.
+        app.env.stepTimer("Worker_%d" % self.wid,
+                          time.perf_counter() - work_start_time)
+        self.ctx.results.put_nowait(app.env._timers)
+
+
+class JobHandler(object):
+    def __init__(self, app, ctx):
+        self.app = app
+        self.ctx = ctx
+
+    def handleJob(self, job):
+        raise NotImplementedError()
+
+
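+# Flatten an exception and its `__cause__` chain into a list of strings, so
+# errors can be reported back to the main process as plain text.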
+def _get_errors(ex):
+    errors = []
+    while ex is not None:
+        errors.append(str(ex))
+        ex = ex.__cause__
+    return errors
+
+
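+# A picklable stand-in for a PageFactory: only plain attributes cross the
+# process boundary, and `build` re-creates the factory against the worker's
+# own app instance.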
+class PageFactoryInfo(object):
+    def __init__(self, fac):
+        self.source_name = fac.source.name
+        self.rel_path = fac.rel_path
+        self.metadata = fac.metadata
+
+    def build(self, app):
+        source = app.getSource(self.source_name)
+        return PageFactory(source, self.rel_path, self.metadata)
+
+
+class LoadJobPayload(object):
+    def __init__(self, fac):
+        self.factory_info = PageFactoryInfo(fac)
+
+
+class LoadJobResult(object):
+    def __init__(self, source_name, path):
+        self.source_name = source_name
+        self.path = path
+        self.config = None
+        self.errors = None
+
+
+class RenderFirstSubJobPayload(object):
+    def __init__(self, fac):
+        self.factory_info = PageFactoryInfo(fac)
+
+
+class RenderFirstSubJobResult(object):
+    def __init__(self, path):
+        self.path = path
+        self.used_assets = None
+        self.used_pagination = None
+        self.pagination_has_more = False
+        self.errors = None
+
+
+class BakeJobPayload(object):
+    def __init__(self, fac, route_metadata, previous_entry,
+                 first_render_info, dirty_source_names, tax_info=None):
+        self.factory_info = PageFactoryInfo(fac)
+        self.route_metadata = route_metadata
+        self.previous_entry = previous_entry
+        self.dirty_source_names = dirty_source_names
+        self.first_render_info = first_render_info
+        self.taxonomy_info = tax_info
+
+
+class BakeJobResult(object):
+    def __init__(self, path, tax_info=None):
+        self.path = path
+        self.taxonomy_info = tax_info
+        self.bake_info = None
+        self.errors = None
+
+
+class LoadJobHandler(JobHandler):
+    def handleJob(self, job):
+        # Just make sure the page has been cached.
+        fac = job.payload.factory_info.build(self.app)
+        logger.debug("Loading page: %s" % fac.ref_spec)
+        result = LoadJobResult(fac.source.name, fac.path)
+        try:
+            page = fac.buildPage()
+            page._load()
+            result.config = page.config.get()
+        except Exception as ex:
+            result.errors = _get_errors(ex)
+
+        self.ctx.results.put_nowait(result)
+
+
+class RenderFirstSubJobHandler(JobHandler):
+    def handleJob(self, job):
+        # Render the segments for the first sub-page of this page.
+        fac = job.payload.factory_info.build(self.app)
+
+        # These things should be OK as they're checked upstream by the baker.
+        route = self.app.getRoute(fac.source.name, fac.metadata,
+                                  skip_taxonomies=True)
+        assert route is not None
+
+        page = fac.buildPage()
+        route_metadata = copy.deepcopy(fac.metadata)
+        qp = QualifiedPage(page, route, route_metadata)
+        ctx = PageRenderingContext(qp)
+
+        result = RenderFirstSubJobResult(fac.path)
+        logger.debug("Preparing page: %s" % fac.ref_spec)
+        try:
+            render_page_segments(ctx)
+            result.used_assets = ctx.used_assets
+            result.used_pagination = ctx.used_pagination is not None
+            if result.used_pagination:
+                result.pagination_has_more = ctx.used_pagination.has_more
+        except Exception as ex:
+            logger.debug("Got rendering error. Sending it to master.")
+            result.errors = _get_errors(ex)
+
+        self.ctx.results.put_nowait(result)
+
+
+class BakeJobHandler(JobHandler):
+    def __init__(self, app, ctx):
+        super(BakeJobHandler, self).__init__(app, ctx)
+        self.page_baker = PageBaker(app, ctx.out_dir, ctx.force)
+
+    def handleJob(self, job):
+        # Actually bake the page and all its sub-pages to the output folder.
+        fac = job.payload.factory_info.build(self.app)
+
+        route_metadata = job.payload.route_metadata
+        tax_info = job.payload.taxonomy_info
+        if tax_info is not None:
+            route = self.app.getTaxonomyRoute(tax_info.taxonomy_name,
+                                              tax_info.source_name)
+        else:
+            route = self.app.getRoute(fac.source.name, route_metadata,
+                                      skip_taxonomies=True)
+        assert route is not None
+
+        result = BakeJobResult(fac.path, tax_info)
+        previous_entry = job.payload.previous_entry
+        first_render_info = job.payload.first_render_info
+        dirty_source_names = job.payload.dirty_source_names
+        logger.debug("Baking page: %s" % fac.ref_spec)
+        try:
+            report = self.page_baker.bake(fac, route, route_metadata,
+                                          previous_entry, first_render_info,
+                                          dirty_source_names, tax_info)
+            result.bake_info = report
+
+        except BakingError as ex:
+            logger.debug("Got baking error. Sending it to master.")
+            result.errors = _get_errors(ex)
+
+        self.ctx.results.put_nowait(result)
+
--- a/piecrust/chefutil.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/chefutil.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,9 +1,18 @@
 import time
+import logging
+import contextlib
 from colorama import Fore
 
 
+@contextlib.contextmanager
+def format_timed_scope(logger, message, *, level=logging.INFO, colored=True):
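+    # Logs how long the enclosed block took once the scope exits, e.g.:
+    #
+    #     with format_timed_scope(logger, "baked posts"):
+    #         bake_posts()  # hypothetical example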
+    start_time = time.perf_counter()
+    yield
+    logger.log(level, format_timed(start_time, message, colored=colored))
+
+
 def format_timed(start_time, message, indent_level=0, colored=True):
-    end_time = time.clock()
+    end_time = time.perf_counter()
     indent = indent_level * '  '
     time_str = '%8.1f ms' % ((end_time - start_time) * 1000.0)
     if colored:
--- a/piecrust/commands/builtin/baking.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/commands/builtin/baking.py	Fri Jun 12 17:09:19 2015 -0700
@@ -6,7 +6,7 @@
 import datetime
 from piecrust.baking.baker import Baker
 from piecrust.baking.records import (
-        BakeRecord, BakeRecordPageEntry, BakeRecordSubPageEntry)
+        BakeRecord, BakeRecordEntry, SubPageBakeInfo)
 from piecrust.chefutil import format_timed
 from piecrust.commands.base import ChefCommand
 from piecrust.processing.base import ProcessorPipeline
@@ -36,6 +36,10 @@
                 help="Force re-baking the entire website.",
                 action='store_true')
         parser.add_argument(
+                '-w', '--workers',
+                help="The number of worker processes to spawn.",
+                type=int, default=-1)
+        parser.add_argument(
                 '--assets-only',
                 help="Only bake the assets (don't bake the web pages).",
                 action='store_true')
@@ -43,13 +47,17 @@
                 '--html-only',
                 help="Only bake HTML files (don't run the asset pipeline).",
                 action='store_true')
+        parser.add_argument(
+                '--show-timers',
+                help="Show detailed timing information.",
+                action='store_true')
 
     def run(self, ctx):
         out_dir = (ctx.args.output or
                    os.path.join(ctx.app.root_dir, '_counter'))
 
         success = True
-        start_time = time.clock()
+        start_time = time.perf_counter()
         try:
             # Bake the site sources.
             if not ctx.args.assets_only:
@@ -71,10 +79,26 @@
             return 1
 
     def _bakeSources(self, ctx, out_dir):
+        if ctx.args.workers > 0:
+            ctx.app.config.set('baker/workers', ctx.args.workers)
         baker = Baker(
                 ctx.app, out_dir,
                 force=ctx.args.force)
         record = baker.bake()
+
+        if ctx.args.show_timers:
+            if record.timers:
+                from colorama import Fore
+                logger.info("-------------------")
+                logger.info("Timing information:")
+                for name in sorted(record.timers.keys()):
+                    val_str = '%8.1f s' % record.timers[name]
+                    logger.info(
+                            "[%s%s%s] %s" %
+                            (Fore.GREEN, val_str, Fore.RESET, name))
+            else:
+                logger.warning("Timing information is not available.")
+
         return record.success
 
     def _bakeAssets(self, ctx, out_dir):
@@ -151,7 +175,7 @@
                 continue
 
             flags = []
-            if entry.flags & BakeRecordPageEntry.FLAG_OVERRIDEN:
+            if entry.flags & BakeRecordEntry.FLAG_OVERRIDEN:
                 flags.append('overriden')
 
             passes = {PASS_RENDERING: 'render', PASS_FORMATTING: 'format'}
@@ -161,9 +185,9 @@
             logging.info("   spec:      %s:%s" % (entry.source_name,
                                                   entry.rel_path))
             if entry.taxonomy_info:
-                tn, t, sn = entry.taxonomy_info
+                tax_name, term, source_name = entry.taxonomy_info
                 logging.info("   taxonomy:  %s (%s:%s)" %
-                             (t, sn, tn))
+                             (term, source_name, tax_name))
             else:
                 logging.info("   taxonomy:  <none>")
             logging.info("   flags:     %s" % ', '.join(flags))
@@ -178,11 +202,11 @@
                 logging.info("     baked?: %s" % sub.was_baked)
 
                 sub_flags = []
-                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_SOURCE:
+                if sub.flags & SubPageBakeInfo.FLAG_FORCED_BY_SOURCE:
                     sub_flags.append('forced by source')
-                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_NO_PREVIOUS:
+                if sub.flags & SubPageBakeInfo.FLAG_FORCED_BY_NO_PREVIOUS:
                     sub_flags.append('forced by missing previous record entry')
-                if sub.flags & BakeRecordSubPageEntry.FLAG_FORCED_BY_PREVIOUS_ERRORS:
+                if sub.flags & SubPageBakeInfo.FLAG_FORCED_BY_PREVIOUS_ERRORS:
                     sub_flags.append('forced by previous errors')
                 logging.info("     flags:  %s" % ', '.join(sub_flags))
 
@@ -193,7 +217,8 @@
                     logging.info("       used terms: %s" %
                                  ', '.join(
                                         ['%s (%s:%s)' % (t, sn, tn)
-                                         for sn, tn, t in pi.used_taxonomy_terms]))
+                                         for sn, tn, t in
+                                         pi.used_taxonomy_terms]))
 
                 if sub.errors:
                     logging.error("   errors: %s" % sub.errors)
--- a/piecrust/data/linker.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/data/linker.py	Fri Jun 12 17:09:19 2015 -0700
@@ -293,36 +293,35 @@
 
         items = list(self._source.listPath(self._dir_path))
         self._items = collections.OrderedDict()
-        with self._source.app.env.page_repository.startBatchGet():
-            for is_dir, name, data in items:
-                # If `is_dir` is true, `data` will be the directory's source
-                # path. If not, it will be a page factory.
-                if is_dir:
-                    item = Linker(self._source, data,
-                                  root_page_path=self._root_page_path)
-                else:
-                    page = data.buildPage()
-                    is_self = (page.rel_path == self._root_page_path)
-                    item = _LinkedPage(page)
-                    item._linker_info.name = name
-                    item._linker_info.is_self = is_self
-                    if is_self:
-                        self._self_item = item
+        for is_dir, name, data in items:
+            # If `is_dir` is true, `data` will be the directory's source
+            # path. If not, it will be a page factory.
+            if is_dir:
+                item = Linker(self._source, data,
+                              root_page_path=self._root_page_path)
+            else:
+                page = data.buildPage()
+                is_self = (page.rel_path == self._root_page_path)
+                item = _LinkedPage(page)
+                item._linker_info.name = name
+                item._linker_info.is_self = is_self
+                if is_self:
+                    self._self_item = item
 
-                existing = self._items.get(name)
-                if existing is None:
-                    self._items[name] = item
-                elif is_dir:
-                    # The current item is a directory. The existing item
-                    # should be a page.
-                    existing._linker_info.child_linker = item
-                    existing._linker_info.is_dir = True
-                else:
-                    # The current item is a page. The existing item should
-                    # be a directory.
-                    item._linker_info.child_linker = existing
-                    item._linker_info.is_dir = True
-                    self._items[name] = item
+            existing = self._items.get(name)
+            if existing is None:
+                self._items[name] = item
+            elif is_dir:
+                # The current item is a directory. The existing item
+                # should be a page.
+                existing._linker_info.child_linker = item
+                existing._linker_info.is_dir = True
+            else:
+                # The current item is a page. The existing item should
+                # be a directory.
+                item._linker_info.child_linker = existing
+                item._linker_info.is_dir = True
+                self._items[name] = item
 
 
 def filter_page_items(item):
--- a/piecrust/data/provider.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/data/provider.py	Fri Jun 12 17:09:19 2015 -0700
@@ -118,17 +118,17 @@
             return self._yearly
 
         self._yearly = []
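+        # Index the yearly entries by year so that finding an existing one is
+        # a dictionary lookup instead of a scan over all years.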
+        yearly_index = {}
         for post in self._source.getPages():
             year = post.datetime.strftime('%Y')
 
-            posts_this_year = next(
-                    filter(lambda y: y.name == year, self._yearly),
-                    None)
+            posts_this_year = yearly_index.get(year)
             if posts_this_year is None:
                 timestamp = time.mktime(
                         (post.datetime.year, 1, 1, 0, 0, 0, 0, 0, -1))
                 posts_this_year = BlogArchiveEntry(self._page, year, timestamp)
                 self._yearly.append(posts_this_year)
+                yearly_index[year] = posts_this_year
 
             posts_this_year._data_source.append(post)
         self._yearly = sorted(self._yearly,
--- a/piecrust/environment.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/environment.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,9 +1,7 @@
-import re
 import time
 import json
 import logging
 import hashlib
-import threading
 import contextlib
 import collections
 import repoze.lru
@@ -12,9 +10,6 @@
 logger = logging.getLogger(__name__)
 
 
-re_fs_cache_key = re.compile(r'[^\d\w\-\._]+')
-
-
 def _make_fs_cache_key(key):
     return hashlib.md5(key.encode('utf8')).hexdigest()
 
@@ -26,57 +21,58 @@
     def __init__(self, size=2048):
         self.cache = repoze.lru.LRUCache(size)
         self.fs_cache = None
+        self._last_access_hit = None
         self._invalidated_fs_items = set()
-        self._lock = threading.RLock()
 
-    @contextlib.contextmanager
-    def startBatchGet(self):
-        logger.debug("Starting batch cache operation.")
-        with self._lock:
-            yield
-        logger.debug("Ending batch cache operation.")
+    @property
+    def last_access_hit(self):
+        return self._last_access_hit
 
     def invalidate(self, key):
-        with self._lock:
-            logger.debug("Invalidating cache item '%s'." % key)
-            self.cache.invalidate(key)
-            if self.fs_cache:
-                logger.debug("Invalidating FS cache item '%s'." % key)
-                fs_key = _make_fs_cache_key(key)
-                self._invalidated_fs_items.add(fs_key)
+        logger.debug("Invalidating cache item '%s'." % key)
+        self.cache.invalidate(key)
+        if self.fs_cache:
+            logger.debug("Invalidating FS cache item '%s'." % key)
+            fs_key = _make_fs_cache_key(key)
+            self._invalidated_fs_items.add(fs_key)
 
-    def get(self, key, item_maker, fs_cache_time=None):
+    def put(self, key, item, save_to_fs=True):
+        self.cache.put(key, item)
+        if self.fs_cache and save_to_fs:
+            fs_key = _make_fs_cache_key(key)
+            item_raw = json.dumps(item)
+            self.fs_cache.write(fs_key, item_raw)
+
+    def get(self, key, item_maker, fs_cache_time=None, save_to_fs=True):
+        self._last_access_hit = True
         item = self.cache.get(key)
         if item is None:
-            logger.debug("Acquiring lock for: %s" % key)
-            with self._lock:
-                item = self.cache.get(key)
-                if item is None:
-                    if (self.fs_cache is not None and
-                            fs_cache_time is not None):
-                        # Try first from the file-system cache.
-                        fs_key = _make_fs_cache_key(key)
-                        if (fs_key not in self._invalidated_fs_items and
-                                self.fs_cache.isValid(fs_key, fs_cache_time)):
-                            logger.debug("'%s' found in file-system cache." %
-                                         key)
-                            item_raw = self.fs_cache.read(fs_key)
-                            item = json.loads(
-                                    item_raw,
-                                    object_pairs_hook=collections.OrderedDict)
-                            self.cache.put(key, item)
-                            return item
+            if (self.fs_cache is not None and
+                    fs_cache_time is not None):
+                # Try first from the file-system cache.
+                fs_key = _make_fs_cache_key(key)
+                if (fs_key not in self._invalidated_fs_items and
+                        self.fs_cache.isValid(fs_key, fs_cache_time)):
+                    logger.debug("'%s' found in file-system cache." %
+                                 key)
+                    item_raw = self.fs_cache.read(fs_key)
+                    item = json.loads(
+                            item_raw,
+                            object_pairs_hook=collections.OrderedDict)
+                    self.cache.put(key, item)
+                    return item
 
-                    # Look into the mem-cache.
-                    logger.debug("'%s' not found in cache, must build." % key)
-                    item = item_maker()
-                    self.cache.put(key, item)
+            # Look into the mem-cache.
+            logger.debug("'%s' not found in cache, must build." % key)
+            item = item_maker()
+            self.cache.put(key, item)
+            self._last_access_hit = False
 
-                    # Save to the file-system if needed.
-                    if (self.fs_cache is not None and
-                            fs_cache_time is not None):
-                        item_raw = json.dumps(item)
-                        self.fs_cache.write(fs_key, item_raw)
+            # Save to the file-system if needed.
+            if self.fs_cache is not None and save_to_fs:
+                fs_key = _make_fs_cache_key(key)
+                item_raw = json.dumps(item)
+                self.fs_cache.write(fs_key, item_raw)
+
         return item
 
 
@@ -88,7 +84,7 @@
         self.start_time = time.clock()
 
 
-class ExecutionInfoStack(threading.local):
+class ExecutionInfoStack(object):
     def __init__(self):
         self._page_stack = []
 
@@ -131,14 +127,28 @@
         self.rendered_segments_repository = MemCache()
         self.fs_caches = {
                 'renders': self.rendered_segments_repository}
+        self.fs_cache_only_for_main_page = False
+        self._timers = {}
 
     def initialize(self, app):
-        self.start_time = time.clock()
+        self.start_time = time.perf_counter()
         self.exec_info_stack.clear()
         self.was_cache_cleaned = False
         self.base_asset_url_format = '%uri%'
         self._onSubCacheDirChanged(app)
 
+    def registerTimer(self, category):
+        self._timers[category] = 0
+
+    @contextlib.contextmanager
+    def timerScope(self, category):
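+        # Measure the time spent in the enclosed block and add it to the
+        # given category, which must have been registered first, e.g.:
+        #
+        #     with env.timerScope('PageLoad'):
+        #         ...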
+        start = time.perf_counter()
+        yield
+        self._timers[category] += time.perf_counter() - start
+
+    def stepTimer(self, category, value):
+        self._timers[category] += value
+
     def _onSubCacheDirChanged(self, app):
         for name, repo in self.fs_caches.items():
             cache = app.cache.getCache(name)
--- a/piecrust/formatting/markdownformatter.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/formatting/markdownformatter.py	Fri Jun 12 17:09:19 2015 -0700
@@ -13,7 +13,7 @@
     def render(self, format_name, txt):
         assert format_name in self.FORMAT_NAMES
         self._ensureInitialized()
-        return self._formatter.convert(txt)
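+        # The Markdown instance accumulates state (footnotes, etc.) between
+        # conversions, so reset it before reuse.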
+        return self._formatter.reset().convert(txt)
 
     def _ensureInitialized(self):
         if self._formatter is not None:
--- a/piecrust/page.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/page.py	Fri Jun 12 17:09:19 2015 -0700
@@ -9,7 +9,8 @@
 import dateutil.parser
 import collections
 from werkzeug.utils import cached_property
-from piecrust.configuration import (Configuration, ConfigurationError,
+from piecrust.configuration import (
+        Configuration, ConfigurationError,
         parse_config_header)
 from piecrust.routing import IRouteMetadataProvider
 
@@ -241,9 +242,11 @@
 
 def load_page(app, path, path_mtime=None):
     try:
-        return _do_load_page(app, path, path_mtime)
+        with app.env.timerScope('PageLoad'):
+            return _do_load_page(app, path, path_mtime)
     except Exception as e:
-        logger.exception("Error loading page: %s" %
+        logger.exception(
+                "Error loading page: %s" %
                 os.path.relpath(path, app.root_dir))
         _, __, traceback = sys.exc_info()
         raise PageLoadingError(path, e).with_traceback(traceback)
@@ -255,9 +258,11 @@
     cache_path = hashlib.md5(path.encode('utf8')).hexdigest() + '.json'
     page_time = path_mtime or os.path.getmtime(path)
     if cache.isValid(cache_path, page_time):
-        cache_data = json.loads(cache.read(cache_path),
+        cache_data = json.loads(
+                cache.read(cache_path),
                 object_pairs_hook=collections.OrderedDict)
-        config = PageConfiguration(values=cache_data['config'],
+        config = PageConfiguration(
+                values=cache_data['config'],
                 validate=False)
         content = json_load_segments(cache_data['content'])
         return config, content, True
@@ -268,7 +273,7 @@
         raw = fp.read()
     header, offset = parse_config_header(raw)
 
-    if not 'format' in header:
+    if 'format' not in header:
         auto_formats = app.config.get('site/auto_formats')
         name, ext = os.path.splitext(path)
         header['format'] = auto_formats.get(ext, None)
--- a/piecrust/rendering.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/rendering.py	Fri Jun 12 17:09:19 2015 -0700
@@ -2,8 +2,8 @@
 import os.path
 import logging
 from werkzeug.utils import cached_property
-from piecrust.data.builder import (DataBuildingContext, build_page_data,
-        build_layout_data)
+from piecrust.data.builder import (
+        DataBuildingContext, build_page_data, build_layout_data)
 from piecrust.data.filters import (
         PaginationFilter, HasFilterClause, IsFilterClause, AndBooleanClause,
         page_value_accessor)
@@ -94,6 +94,12 @@
         return self.page.getUri(self.page_num)
 
     @property
+    def pagination_has_more(self):
+        if self.used_pagination is None:
+            return False
+        return self.used_pagination.has_more
+
+    @property
     def current_pass_info(self):
         return self.render_passes.get(self._current_pass)
 
@@ -157,15 +163,19 @@
         # Render content segments.
         ctx.setCurrentPass(PASS_FORMATTING)
         repo = ctx.app.env.rendered_segments_repository
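+        # Workers can be told to only save file-system cache data for the
+        # main page (the bottom of the execution stack); skip saving for
+        # anything else in that case.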
+        save_to_fs = True
+        if ctx.app.env.fs_cache_only_for_main_page and not eis.is_main_page:
+            save_to_fs = False
         if repo and not ctx.force_render:
-            cache_key = ctx.uri
-            page_time = page.path_mtime
             contents = repo.get(
-                    cache_key,
+                    ctx.uri,
                     lambda: _do_render_page_segments(page, page_data),
-                    fs_cache_time=page_time)
+                    fs_cache_time=page.path_mtime,
+                    save_to_fs=save_to_fs)
         else:
             contents = _do_render_page_segments(page, page_data)
+            if repo:
+                repo.put(ctx.uri, contents, save_to_fs)
 
         # Render layout.
         ctx.setCurrentPass(PASS_RENDERING)
@@ -226,9 +236,10 @@
         for seg_part in seg.parts:
             part_format = seg_part.fmt or format_name
             try:
-                part_text = engine.renderString(
-                        seg_part.content, page_data,
-                        filename=page.path)
+                with app.env.timerScope(engine.__class__.__name__):
+                    part_text = engine.renderString(
+                            seg_part.content, page_data,
+                            filename=page.path)
             except TemplatingError as err:
                 err.lineno += seg_part.line
                 raise err
@@ -291,7 +302,8 @@
     format_name = format_name or app.config.get('site/default_format')
     for fmt in app.plugin_loader.getFormatters():
         if fmt.FORMAT_NAMES is None or format_name in fmt.FORMAT_NAMES:
-            txt = fmt.render(format_name, txt)
+            with app.env.timerScope(fmt.__class__.__name__):
+                txt = fmt.render(format_name, txt)
             format_count += 1
             if fmt.OUTPUT_FORMAT is not None:
                 format_name = fmt.OUTPUT_FORMAT
--- a/piecrust/serving/server.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/serving/server.py	Fri Jun 12 17:09:19 2015 -0700
@@ -319,7 +319,7 @@
             if route_terms is None:
                 return None
 
-            tax_page_ref = taxonomy.getPageRef(source.name)
+            tax_page_ref = taxonomy.getPageRef(source)
             factory = tax_page_ref.getFactory()
             tax_terms = route.unslugifyTaxonomyTerm(route_terms)
             route_metadata[taxonomy.term_name] = tax_terms
--- a/piecrust/sources/array.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/sources/array.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,5 +1,6 @@
 from piecrust.sources.base import PageSource
 from piecrust.sources.mixins import SimplePaginationSourceMixin
+from piecrust.sources.pageref import PageRef
 
 
 class CachedPageFactory(object):
@@ -30,7 +31,7 @@
 
 class ArraySource(PageSource, SimplePaginationSourceMixin):
     def __init__(self, app, inner_source, name='array', config=None):
-        super(ArraySource, self).__init__(app, name, config or {})
+        super(ArraySource, self).__init__(app, name, config)
         self.inner_source = inner_source
 
     @property
@@ -41,3 +42,6 @@
         for p in self.inner_source:
             yield CachedPageFactory(p)
 
+    def getTaxonomyPageRef(self, tax_name):
+        return None
+
--- a/piecrust/sources/base.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/sources/base.py	Fri Jun 12 17:09:19 2015 -0700
@@ -20,9 +20,8 @@
 
 
 def build_pages(app, factories):
-    with app.env.page_repository.startBatchGet():
-        for f in factories:
-            yield f.buildPage()
+    for f in factories:
+        yield f.buildPage()
 
 
 class InvalidFileSystemEndpointError(Exception):
@@ -59,9 +58,6 @@
     def _doBuildPage(self):
         logger.debug("Building page: %s" % self.path)
         page = Page(self.source, copy.deepcopy(self.metadata), self.rel_path)
-        # Load it right away, especially when using the page repository,
-        # because we'll be inside a critical scope.
-        page._load()
         return page
 
 
--- a/piecrust/sources/mixins.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/sources/mixins.py	Fri Jun 12 17:09:19 2015 -0700
@@ -5,7 +5,7 @@
 from piecrust.data.filters import PaginationFilter, page_value_accessor
 from piecrust.sources.base import PageFactory
 from piecrust.sources.interfaces import IPaginationSource, IListableSource
-from piecrust.sources.pageref import PageNotFoundError
+from piecrust.sources.pageref import PageRef
 
 
 logger = logging.getLogger(__name__)
@@ -41,13 +41,15 @@
         if self._taxonomy_pages is not None:
             return
 
+        app = self.source.app
         self._taxonomy_pages = set()
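+        # Collect the relative paths of taxonomy index pages that live in
+        # this source, by going through every source's taxonomy page refs.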
-        for tax in self.source.app.taxonomies:
-            page_ref = tax.getPageRef(self.source.name)
-            try:
-                self._taxonomy_pages.add(page_ref.rel_path)
-            except PageNotFoundError:
-                pass
+        for src in app.sources:
+            for tax in app.taxonomies:
+                ref_spec = src.getTaxonomyPageRef(tax.name)
+                page_ref = PageRef(app, ref_spec)
+                for sn, rp in page_ref.possible_split_ref_specs:
+                    if sn == self.source.name:
+                        self._taxonomy_pages.add(rp)
 
 
 class DateSortIterator(object):
--- a/piecrust/sources/pageref.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/sources/pageref.py	Fri Jun 12 17:09:19 2015 -0700
@@ -42,7 +42,6 @@
 
     @property
     def source_name(self):
-        self._checkHits()
         return self._first_valid_hit.source_name
 
     @property
@@ -51,23 +50,25 @@
 
     @property
     def rel_path(self):
-        self._checkHits()
         return self._first_valid_hit.rel_path
 
     @property
     def path(self):
-        self._checkHits()
         return self._first_valid_hit.path
 
     @property
     def metadata(self):
-        self._checkHits()
         return self._first_valid_hit.metadata
 
     @property
-    def possible_rel_paths(self):
+    def possible_ref_specs(self):
         self._load()
-        return [h.rel_path for h in self._hits]
+        return ['%s:%s' % (h.source_name, h.rel_path) for h in self._hits]
+
+    @property
+    def possible_split_ref_specs(self):
+        self._load()
+        return [(h.source_name, h.rel_path) for h in self._hits]
 
     @property
     def possible_paths(self):
@@ -80,17 +81,23 @@
 
     @property
     def _first_valid_hit(self):
+        self._checkHits()
         return self._hits[self._first_valid_hit_index]
 
     def _load(self):
         if self._hits is not None:
             return
 
+        self._hits = []
+
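+        # A `None` page ref (e.g. from a source with no taxonomy pages)
+        # simply yields no hits.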
+        if self._page_ref is None:
+            self._first_valid_hit_index = self._INDEX_NOT_FOUND
+            return
+
         it = list(page_ref_pattern.finditer(self._page_ref))
         if len(it) == 0:
             raise Exception("Invalid page ref: %s" % self._page_ref)
 
-        self._hits = []
         for m in it:
             source_name = m.group('src')
             source = self.app.getSource(source_name)
@@ -111,15 +118,17 @@
     def _checkHits(self):
         if self._first_valid_hit_index >= 0:
             return
+
+        if self._first_valid_hit_index == self._INDEX_NEEDS_LOADING:
+            self._load()
+            self._first_valid_hit_index = self._INDEX_NOT_FOUND
+            for i, hit in enumerate(self._hits):
+                if os.path.isfile(hit.path):
+                    self._first_valid_hit_index = i
+                    break
+
         if self._first_valid_hit_index == self._INDEX_NOT_FOUND:
             raise PageNotFoundError(
                     "No valid paths were found for page reference: %s" %
                     self._page_ref)
 
-        self._load()
-        self._first_valid_hit_index = self._INDEX_NOT_FOUND
-        for i, hit in enumerate(self._hits):
-            if os.path.isfile(hit.path):
-                self._first_valid_hit_index = i
-                break
-
--- a/piecrust/taxonomies.py	Sat May 30 15:41:52 2015 -0700
+++ b/piecrust/taxonomies.py	Fri Jun 12 17:09:19 2015 -0700
@@ -16,21 +16,19 @@
             return self.name
         return self.term_name
 
-    def resolvePagePath(self, source_name):
-        pr = self.getPageRef(source_name)
+    def resolvePagePath(self, source):
+        pr = self.getPageRef(source)
         try:
             return pr.path
         except PageNotFoundError:
             return None
 
-    def getPageRef(self, source_name):
-        if source_name in self._source_page_refs:
-            return self._source_page_refs[source_name]
+    def getPageRef(self, source):
+        if source.name in self._source_page_refs:
+            return self._source_page_refs[source.name]
 
-        source = self.app.getSource(source_name)
-        ref_path = (source.getTaxonomyPageRef(self.name) or
-                '%s:%s' % (source_name, self.page_ref))
+        ref_path = source.getTaxonomyPageRef(self.name)
         page_ref = PageRef(self.app, ref_path)
-        self._source_page_refs[source_name] = page_ref
+        self._source_page_refs[source.name] = page_ref
         return page_ref
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/basefs.py	Fri Jun 12 17:09:19 2015 -0700
@@ -0,0 +1,104 @@
+import os.path
+import yaml
+from piecrust.app import PieCrust
+
+
+class TestFileSystemBase(object):
+    def __init__(self):
+        pass
+
+    def _initDefaultSpec(self):
+        self.withDir('counter')
+        self.withFile(
+                'kitchen/config.yml',
+                "site:\n  title: Mock Website\n")
+
+    def path(self, p):
+        raise NotImplementedError()
+
+    def getStructure(self, path=None):
+        raise NotImplementedError()
+
+    def getFileEntry(self, path):
+        raise NotImplementedError()
+
+    def _createDir(self, path):
+        raise NotImplementedError()
+
+    def _createFile(self, path, contents):
+        raise NotImplementedError()
+
+    def getApp(self, cache=True):
+        root_dir = self.path('/kitchen')
+        return PieCrust(root_dir, cache=cache, debug=True)
+
+    def withDir(self, path):
+        path = path.replace('\\', '/')
+        path = path.lstrip('/')
+        path = '/%s/%s' % (self._root, path)
+        self._createDir(path)
+        return self
+
+    def withFile(self, path, contents):
+        path = path.replace('\\', '/')
+        path = path.lstrip('/')
+        path = '/%s/%s' % (self._root, path)
+        self._createFile(path, contents)
+        return self
+
+    def withAsset(self, path, contents):
+        return self.withFile('kitchen/' + path, contents)
+
+    def withAssetDir(self, path):
+        return self.withDir('kitchen/' + path)
+
+    def withConfig(self, config):
+        return self.withFile(
+                'kitchen/config.yml',
+                yaml.dump(config))
+
+    def withThemeConfig(self, config):
+        return self.withFile(
+                'kitchen/theme/theme_config.yml',
+                yaml.dump(config))
+
+    def withPage(self, url, config=None, contents=None):
+        config = config or {}
+        contents = contents or "A test page."
+        text = "---\n"
+        text += yaml.dump(config)
+        text += "---\n"
+        text += contents
+
+        name, ext = os.path.splitext(url)
+        if not ext:
+            url += '.md'
+        url = url.lstrip('/')
+        return self.withAsset(url, text)
+
+    def withPageAsset(self, page_url, name, contents=None):
+        contents = contents or "A test asset."
+        url_base, ext = os.path.splitext(page_url)
+        dirname = url_base + '-assets'
+        return self.withAsset(
+                '%s/%s' % (dirname, name), contents)
+
+    def withPages(self, num, url_factory, config_factory=None,
+                  contents_factory=None):
+        for i in range(num):
+            if isinstance(url_factory, str):
+                url = url_factory.format(idx=i, idx1=(i + 1))
+            else:
+                url = url_factory(i)
+
+            config = None
+            if config_factory:
+                config = config_factory(i)
+
+            contents = None
+            if contents_factory:
+                contents = contents_factory(i)
+
+            self.withPage(url, config, contents)
+        return self
+
--- a/tests/conftest.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/conftest.py	Fri Jun 12 17:09:19 2015 -0700
@@ -148,36 +148,36 @@
             baker = Baker(app, out_dir)
             record = baker.bake()
 
-        if not record.success:
-            errors = []
-            for e in record.entries:
-                errors += e.getAllErrors()
-            raise BakeError(errors)
+            if not record.success:
+                errors = []
+                for e in record.entries:
+                    errors += e.getAllErrors()
+                raise BakeError(errors)
 
-        if expected_output_files:
-            actual = fs.getStructure('kitchen/_counter')
-            error = _compare_dicts(expected_output_files, actual)
-            if error:
-                raise ExpectedBakeOutputError(error)
+            if expected_output_files:
+                actual = fs.getStructure('kitchen/_counter')
+                error = _compare_dicts(expected_output_files, actual)
+                if error:
+                    raise ExpectedBakeOutputError(error)
 
-        if expected_partial_files:
-            keys = list(sorted(expected_partial_files.keys()))
-            for key in keys:
-                try:
-                    actual = fs.getFileEntry('kitchen/_counter/' +
-                                             key.lstrip('/'))
-                except Exception as e:
-                    raise ExpectedBakeOutputError([
-                        "Can't access output file %s: %s" % (key, e)])
+            if expected_partial_files:
+                keys = list(sorted(expected_partial_files.keys()))
+                for key in keys:
+                    try:
+                        actual = fs.getFileEntry('kitchen/_counter/' +
+                                                 key.lstrip('/'))
+                    except Exception as e:
+                        raise ExpectedBakeOutputError([
+                            "Can't access output file %s: %s" % (key, e)])
 
-                expected = expected_partial_files[key]
-                # HACK because for some reason PyYAML adds a new line for those
-                # and I have no idea why.
-                actual = actual.rstrip('\n')
-                expected = expected.rstrip('\n')
-                cmpres = _compare_str(expected, actual, key)
-                if cmpres:
-                    raise ExpectedBakeOutputError(cmpres)
+                    expected = expected_partial_files[key]
+                    # HACK because for some reason PyYAML adds a new line for
+                    # those and I have no idea why.
+                    actual = actual.rstrip('\n')
+                    expected = expected.rstrip('\n')
+                    cmpres = _compare_str(expected, actual, key)
+                    if cmpres:
+                        raise ExpectedBakeOutputError(cmpres)
 
     def reportinfo(self):
         return self.fspath, 0, "bake: %s" % self.name
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/memfs.py	Fri Jun 12 17:09:19 2015 -0700
@@ -0,0 +1,309 @@
+import os.path
+import io
+import time
+import errno
+import random
+import codecs
+import shutil
+import mock
+from piecrust import RESOURCES_DIR
+from .basefs import TestFileSystemBase
+
+
+class _MockFsEntry(object):
+    def __init__(self, contents):
+        self.contents = contents
+        self.metadata = {'mtime': time.time()}
+
+
+class _MockFsEntryWriter(object):
+    def __init__(self, entry, mode='rt'):
+        self._entry = entry
+        self._mode = mode
+
+        if 'b' in mode:
+            data = entry.contents
+            if isinstance(data, str):
+                data = data.encode('utf8')
+            self._stream = io.BytesIO(data)
+        else:
+            self._stream = io.StringIO(entry.contents)
+
+    def __getattr__(self, name):
+        return getattr(self._stream, name)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_tb):
+        if 'w' in self._mode:
+            if 'a' in self._mode:
+                self._entry.contents += self._stream.getvalue()
+            else:
+                self._entry.contents = self._stream.getvalue()
+            self._entry.metadata['mtime'] = time.time()
+        self._stream.close()
+
+
+class MemoryFileSystem(TestFileSystemBase):
+    def __init__(self, default_spec=True):
+        self._root = 'root_%d' % random.randrange(1000)
+        self._fs = {self._root: {}}
+        if default_spec:
+            self._initDefaultSpec()
+
+    def path(self, p):
+        p = p.replace('\\', '/')
+        if p in ['/', '', None]:
+            return '/%s' % self._root
+        return '/%s/%s' % (self._root, p.lstrip('/'))
+
+    def getStructure(self, path=None):
+        root = self._fs[self._root]
+        if path:
+            root = self._getEntry(self.path(path))
+            if root is None:
+                raise Exception("No such path: %s" % path)
+            if not isinstance(root, dict):
+                raise Exception("Path is not a directory: %s" % path)
+
+        res = {}
+        for k, v in root.items():
+            self._getStructureRecursive(v, res, k)
+        return res
+
+    def getFileEntry(self, path):
+        entry = self._getEntry(self.path(path))
+        if entry is None:
+            raise Exception("No such file: %s" % path)
+        if not isinstance(entry, _MockFsEntry):
+            raise Exception("Path is not a file: %s" % path)
+        return entry.contents
+
+    def _getStructureRecursive(self, src, target, name):
+        if isinstance(src, _MockFsEntry):
+            target[name] = src.contents
+            return
+
+        e = {}
+        for k, v in src.items():
+            self._getStructureRecursive(v, e, k)
+        target[name] = e
+
+    def _getEntry(self, path):
+        cur = self._fs
+        path = path.replace('\\', '/').lstrip('/')
+        bits = path.split('/')
+        for p in bits:
+            try:
+                cur = cur[p]
+            except KeyError:
+                return None
+        return cur
+
+    def _createDir(self, path):
+        cur = self._fs
+        path = path.replace('\\', '/').strip('/')
+        bits = path.split('/')
+        for b in bits:
+            if b not in cur:
+                cur[b] = {}
+            cur = cur[b]
+        return self
+
+    def _createFile(self, path, contents):
+        cur = self._fs
+        path = path.replace('\\', '/').lstrip('/')
+        bits = path.split('/')
+        for b in bits[:-1]:
+            if b not in cur:
+                cur[b] = {}
+            cur = cur[b]
+        cur[bits[-1]] = _MockFsEntry(contents)
+        return self
+
+    def _deleteEntry(self, path):
+        parent = self._getEntry(os.path.dirname(path))
+        assert parent is not None
+        name = os.path.basename(path)
+        assert name in parent
+        del parent[name]
+
+
+class MemoryScope(object):
+    def __init__(self, fs, open_patches=None):
+        self.open_patches = open_patches or []
+        self._fs = fs
+        self._patchers = []
+        self._originals = {}
+
+    @property
+    def root(self):
+        return self._fs._root
+
+    def __enter__(self):
+        self._startMock()
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self._endMock()
+
+    def _startMock(self):
+        # TODO: sadly, there seems to be no way to replace `open` everywhere?
+        modules = self.open_patches + [
+                '__main__',
+                'piecrust.records',
+                'jinja2.utils']
+        for m in modules:
+            self._createMock('%s.open' % m, open, self._open, create=True)
+
+        self._createMock('codecs.open', codecs.open, self._codecsOpen)
+        self._createMock('os.listdir', os.listdir, self._listdir)
+        self._createMock('os.makedirs', os.makedirs, self._makedirs)
+        self._createMock('os.remove', os.remove, self._remove)
+        self._createMock('os.rename', os.rename, self._rename)
+        self._createMock('os.path.exists', os.path.exists, self._exists)
+        self._createMock('os.path.isdir', os.path.isdir, self._isdir)
+        self._createMock('os.path.isfile', os.path.isfile, self._isfile)
+        self._createMock('os.path.islink', os.path.islink, self._islink)
+        self._createMock('os.path.getmtime', os.path.getmtime, self._getmtime)
+        self._createMock('shutil.copyfile', shutil.copyfile, self._copyfile)
+        self._createMock('shutil.rmtree', shutil.rmtree, self._rmtree)
+        for p in self._patchers:
+            p.start()
+
+    def _endMock(self):
+        for p in self._patchers:
+            p.stop()
+
+    def _createMock(self, name, orig, func, **kwargs):
+        self._originals[name] = orig
+        self._patchers.append(mock.patch(name, func, **kwargs))
+
+    def _doOpen(self, orig_name, path, mode, *args, **kwargs):
+        path = os.path.normpath(path)
+        if path.startswith(RESOURCES_DIR):
+            return self._originals[orig_name](path, mode, *args, **kwargs)
+
+        if 'r' in mode:
+            e = self._getFsEntry(path)
+        elif 'w' in mode or 'x' in mode or 'a' in mode:
+            e = self._getFsEntry(path)
+            if e is None:
+                contents = ''
+                if 'b' in mode:
+                    contents = bytes()
+                self._fs._createFile(path, contents)
+                e = self._getFsEntry(path)
+                assert e is not None
+            elif 'x' in mode:
+                err = IOError("File '%s' already exists" % path)
+                err.errno = errno.EEXIST
+                raise err
+        else:
+            err = IOError("Unsupported open mode: %s" % mode)
+            err.errno = errno.EINVAL
+            raise err
+
+        if e is None:
+            err = IOError("No such file: %s" % path)
+            err.errno = errno.ENOENT
+            raise err
+        if not isinstance(e, _MockFsEntry):
+            err = IOError("'%s' is not a file %s" % (path, e))
+            err.errno = errno.EISDIR
+            raise err
+
+        return _MockFsEntryWriter(e, mode)
+
+    def _open(self, path, mode, *args, **kwargs):
+        return self._doOpen('__main__.open', path, mode, *args, **kwargs)
+
+    def _codecsOpen(self, path, mode, *args, **kwargs):
+        return self._doOpen('codecs.open', path, mode, *args, **kwargs)
+
+    def _listdir(self, path):
+        path = os.path.normpath(path)
+        if path.startswith(RESOURCES_DIR):
+            return self._originals['os.listdir'](path)
+
+        e = self._getFsEntry(path)
+        if e is None:
+            raise OSError("No such directory: %s" % path)
+        if not isinstance(e, dict):
+            raise OSError("'%s' is not a directory." % path)
+        return list(e.keys())
+
+    def _makedirs(self, path, mode=0o777, exist_ok=False):
+        if not path.replace('\\', '/').startswith('/' + self.root):
+            raise Exception("Shouldn't create directory: %s" % path)
+        self._fs._createDir(path)
+
+    def _remove(self, path):
+        path = os.path.normpath(path)
+        self._fs._deleteEntry(path)
+
+    def _exists(self, path):
+        path = os.path.normpath(path)
+        if path.startswith(RESOURCES_DIR):
+            return self._originals['os.path.exists'](path)
+        e = self._getFsEntry(path)
+        return e is not None
+
+    def _isdir(self, path):
+        path = os.path.normpath(path)
+        if path.startswith(RESOURCES_DIR):
+            return self._originals['os.path.isdir'](path)
+        e = self._getFsEntry(path)
+        return e is not None and isinstance(e, dict)
+
+    def _isfile(self, path):
+        path = os.path.normpath(path)
+        if path.startswith(RESOURCES_DIR):
+            return self._originals['os.path.isfile'](path)
+        e = self._getFsEntry(path)
+        return e is not None and isinstance(e, _MockFsEntry)
+
+    def _islink(self, path):
+        path = os.path.normpath(path)
+        if path.startswith(RESOURCES_DIR):
+            return self._originals['os.path.islink'](path)
+        return False
+
+    def _getmtime(self, path):
+        path = os.path.normpath(path)
+        if path.startswith(RESOURCES_DIR):
+            return self._originals['os.path.getmtime'](path)
+        e = self._getFsEntry(path)
+        if e is None:
+            raise OSError("No such file: %s" % path)
+        return e.metadata['mtime']
+
+    def _copyfile(self, src, dst):
+        src = os.path.normpath(src)
+        if src.startswith(RESOURCES_DIR):
+            with self._originals['__main__.open'](src, 'r') as fp:
+                src_text = fp.read()
+        else:
+            e = self._getFsEntry(src)
+            src_text = e.contents
+        if not dst.replace('\\', '/').startswith('/' + self.root):
+            raise Exception("Shouldn't copy to: %s" % dst)
+        self._fs._createFile(dst, src_text)
+
+    def _rename(self, src, dst):
+        src = os.path.normpath(src)
+        if src.startswith(RESOURCES_DIR) or dst.startswith(RESOURCES_DIR):
+            raise Exception("Shouldn't rename files in the resources path.")
+        self._copyfile(src, dst)
+        self._remove(src)
+
+    def _rmtree(self, path):
+        if not path.replace('\\', '/').startswith('/' + self.root):
+            raise Exception("Shouldn't delete trees from: %s" % path)
+        e = self._fs._getEntry(os.path.dirname(path))
+        del e[os.path.basename(path)]
+
+    def _getFsEntry(self, path):
+        return self._fs._getEntry(path)
+
--- a/tests/mockutil.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/mockutil.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,23 +1,10 @@
-import io
-import time
-import errno
-import random
-import codecs
-import shutil
 import os.path
 import mock
-import yaml
 from piecrust.app import PieCrust, PieCrustConfiguration
 from piecrust.page import Page
 from piecrust.rendering import QualifiedPage, PageRenderingContext, render_page
 
 
-resources_path = os.path.abspath(
-            os.path.join(
-            os.path.dirname(__file__),
-            '..', 'piecrust', 'resources'))
-
-
 def get_mock_app(config=None):
     app = mock.MagicMock(spec=PieCrust)
     app.config = PieCrustConfiguration()
@@ -37,376 +24,7 @@
     return rp.content
 
 
-class _MockFsEntry(object):
-    def __init__(self, contents):
-        self.contents = contents
-        self.metadata = {'mtime': time.time()}
-
-
-class _MockFsEntryWriter(object):
-    def __init__(self, entry, mode='rt'):
-        self._entry = entry
-        self._mode = mode
-
-        if 'b' in mode:
-            data = entry.contents
-            if isinstance(data, str):
-                data = data.encode('utf8')
-            self._stream = io.BytesIO(data)
-        else:
-            self._stream = io.StringIO(entry.contents)
-
-    def __getattr__(self, name):
-        return getattr(self._stream, name)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, exc_tb):
-        if 'w' in self._mode:
-            if 'a' in self._mode:
-                self._entry.contents += self._stream.getvalue()
-            else:
-                self._entry.contents = self._stream.getvalue()
-            self._entry.metadata['mtime'] = time.time()
-        self._stream.close()
-
-
-class mock_fs(object):
-    def __init__(self, default_spec=True):
-        self._root = 'root_%d' % random.randrange(1000)
-        self._fs = {self._root: {}}
-        if default_spec:
-            self.withDir('counter')
-            self.withFile('kitchen/config.yml',
-                    "site:\n  title: Mock Website\n")
-
-    def path(self, p):
-        p = p.replace('\\', '/')
-        if p in ['/', '', None]:
-            return '/%s' % self._root
-        return '/%s/%s' % (self._root, p.lstrip('/'))
-
-    def getApp(self, cache=True):
-        root_dir = self.path('/kitchen')
-        return PieCrust(root_dir, cache=cache, debug=True)
-
-    def withDir(self, path):
-        path = path.replace('\\', '/')
-        path = path.lstrip('/')
-        path = '/%s/%s' % (self._root, path)
-        self._createDir(path)
-        return self
-
-    def withFile(self, path, contents):
-        path = path.replace('\\', '/')
-        path = path.lstrip('/')
-        path = '/%s/%s' % (self._root, path)
-        self._createFile(path, contents)
-        return self
-
-    def withAsset(self, path, contents):
-        return self.withFile('kitchen/' + path, contents)
-
-    def withAssetDir(self, path):
-        return self.withDir('kitchen/' + path)
-
-    def withConfig(self, config):
-        return self.withFile(
-                'kitchen/config.yml',
-                yaml.dump(config))
-
-    def withThemeConfig(self, config):
-        return self.withFile(
-                'kitchen/theme/theme_config.yml',
-                yaml.dump(config))
-
-    def withPage(self, url, config=None, contents=None):
-        config = config or {}
-        contents = contents or "A test page."
-        text = "---\n"
-        text += yaml.dump(config)
-        text += "---\n"
-        text += contents
-
-        name, ext = os.path.splitext(url)
-        if not ext:
-            url += '.md'
-        url = url.lstrip('/')
-        return self.withAsset(url, text)
-
-    def withPageAsset(self, page_url, name, contents=None):
-        contents = contents or "A test asset."
-        url_base, ext = os.path.splitext(page_url)
-        dirname = url_base + '-assets'
-        return self.withAsset('%s/%s' % (dirname, name),
-                contents)
-
-    def withPages(self, num, url_factory, config_factory=None,
-                  contents_factory=None):
-        for i in range(num):
-            if isinstance(url_factory, str):
-                url = url_factory.format(idx=i, idx1=(i + 1))
-            else:
-                url = url_factory(i)
-
-            config = None
-            if config_factory:
-                config = config_factory(i)
-
-            contents = None
-            if contents_factory:
-                contents = contents_factory(i)
-
-            self.withPage(url, config, contents)
-        return self
-
-    def getStructure(self, path=None):
-        root = self._fs[self._root]
-        if path:
-            root = self._getEntry(self.path(path))
-            if root is None:
-                raise Exception("No such path: %s" % path)
-            if not isinstance(root, dict):
-                raise Exception("Path is not a directory: %s" % path)
-
-        res = {}
-        for k, v in root.items():
-            self._getStructureRecursive(v, res, k)
-        return res
-
-    def getFileEntry(self, path):
-        entry = self._getEntry(self.path(path))
-        if entry is None:
-            raise Exception("No such file: %s" % path)
-        if not isinstance(entry, _MockFsEntry):
-            raise Exception("Path is not a file: %s" % path)
-        return entry.contents
-
-    def _getStructureRecursive(self, src, target, name):
-        if isinstance(src, _MockFsEntry):
-            target[name] = src.contents
-            return
-
-        e = {}
-        for k, v in src.items():
-            self._getStructureRecursive(v, e, k)
-        target[name] = e
-
-    def _getEntry(self, path):
-        cur = self._fs
-        path = path.replace('\\', '/').lstrip('/')
-        bits = path.split('/')
-        for p in bits:
-            try:
-                cur = cur[p]
-            except KeyError:
-                return None
-        return cur
-
-    def _createDir(self, path):
-        cur = self._fs
-        path = path.replace('\\', '/').strip('/')
-        bits = path.split('/')
-        for b in bits:
-            if b not in cur:
-                cur[b] = {}
-            cur = cur[b]
-        return self
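+# The in-memory mock file-system can't be shared with spawned worker
+# processes, so the test helpers are now thin aliases over a real
+# temporary directory (see tests/tmpfs.py).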
+from .tmpfs import (
+        TempDirFileSystem as mock_fs,
+        TempDirScope as mock_fs_scope)
 
-    def _createFile(self, path, contents):
-        cur = self._fs
-        path = path.replace('\\', '/').lstrip('/')
-        bits = path.split('/')
-        for b in bits[:-1]:
-            if b not in cur:
-                cur[b] = {}
-            cur = cur[b]
-        cur[bits[-1]] = _MockFsEntry(contents)
-        return self
-
-    def _deleteEntry(self, path):
-        parent = self._getEntry(os.path.dirname(path))
-        assert parent is not None
-        name = os.path.basename(path)
-        assert name in parent
-        del parent[name]
-
-
-class mock_fs_scope(object):
-    def __init__(self, fs, open_patches=None):
-        self.open_patches = open_patches or []
-        self._fs = fs
-        self._patchers = []
-        self._originals = {}
-
-    @property
-    def root(self):
-        return self._fs._root
-
-    def __enter__(self):
-        self._startMock()
-        return self
-
-    def __exit__(self, type, value, traceback):
-        self._endMock()
-
-    def _startMock(self):
-        # TODO: sadly, there seems to be no way to replace `open` everywhere?
-        modules = self.open_patches + [
-                '__main__',
-                'piecrust.records',
-                'jinja2.utils']
-        for m in modules:
-            self._createMock('%s.open' % m, open, self._open, create=True)
-
-        self._createMock('codecs.open', codecs.open, self._codecsOpen)
-        self._createMock('os.listdir', os.listdir, self._listdir)
-        self._createMock('os.makedirs', os.makedirs, self._makedirs)
-        self._createMock('os.remove', os.remove, self._remove)
-        self._createMock('os.rename', os.rename, self._rename)
-        self._createMock('os.path.exists', os.path.exists, self._exists)
-        self._createMock('os.path.isdir', os.path.isdir, self._isdir)
-        self._createMock('os.path.isfile', os.path.isfile, self._isfile)
-        self._createMock('os.path.islink', os.path.islink, self._islink)
-        self._createMock('os.path.getmtime', os.path.getmtime, self._getmtime)
-        self._createMock('shutil.copyfile', shutil.copyfile, self._copyfile)
-        self._createMock('shutil.rmtree', shutil.rmtree, self._rmtree)
-        for p in self._patchers:
-            p.start()
-
-    def _endMock(self):
-        for p in self._patchers:
-            p.stop()
-
-    def _createMock(self, name, orig, func, **kwargs):
-        self._originals[name] = orig
-        self._patchers.append(mock.patch(name, func, **kwargs))
-
-    def _doOpen(self, orig_name, path, mode, *args, **kwargs):
-        path = os.path.normpath(path)
-        if path.startswith(resources_path):
-            return self._originals[orig_name](path, mode, *args, **kwargs)
-
-        if 'r' in mode:
-            e = self._getFsEntry(path)
-        elif 'w' in mode or 'x' in mode or 'a' in mode:
-            e = self._getFsEntry(path)
-            if e is None:
-                contents = ''
-                if 'b' in mode:
-                    contents = bytes()
-                self._fs._createFile(path, contents)
-                e = self._getFsEntry(path)
-                assert e is not None
-            elif 'x' in mode:
-                err = IOError("File '%s' already exists" % path)
-                err.errno = errno.EEXIST
-                raise err
-        else:
-            err = IOError("Unsupported open mode: %s" % mode)
-            err.errno = errno.EINVAL
-            raise err
-
-        if e is None:
-            err = IOError("No such file: %s" % path)
-            err.errno = errno.ENOENT
-            raise err
-        if not isinstance(e, _MockFsEntry):
-            err = IOError("'%s' is not a file %s" % (path, e))
-            err.errno = errno.EISDIR
-            raise err
-
-        return _MockFsEntryWriter(e, mode)
-
-    def _open(self, path, mode, *args, **kwargs):
-        return self._doOpen('__main__.open', path, mode, *args, **kwargs)
-
-    def _codecsOpen(self, path, mode, *args, **kwargs):
-        return self._doOpen('codecs.open', path, mode, *args, **kwargs)
-
-    def _listdir(self, path):
-        path = os.path.normpath(path)
-        if path.startswith(resources_path):
-            return self._originals['os.listdir'](path)
-
-        e = self._getFsEntry(path)
-        if e is None:
-            raise OSError("No such directory: %s" % path)
-        if not isinstance(e, dict):
-            raise OSError("'%s' is not a directory." % path)
-        return list(e.keys())
-
-    def _makedirs(self, path, mode=0o777):
-        if not path.replace('\\', '/').startswith('/' + self.root):
-            raise Exception("Shouldn't create directory: %s" % path)
-        self._fs._createDir(path)
-
-    def _remove(self, path):
-        path = os.path.normpath(path)
-        self._fs._deleteEntry(path)
-
-    def _exists(self, path):
-        path = os.path.normpath(path)
-        if path.startswith(resources_path):
-            return self._originals['os.path.isdir'](path)
-        e = self._getFsEntry(path)
-        return e is not None
-
-    def _isdir(self, path):
-        path = os.path.normpath(path)
-        if path.startswith(resources_path):
-            return self._originals['os.path.isdir'](path)
-        e = self._getFsEntry(path)
-        return e is not None and isinstance(e, dict)
-
-    def _isfile(self, path):
-        path = os.path.normpath(path)
-        if path.startswith(resources_path):
-            return self._originals['os.path.isfile'](path)
-        e = self._getFsEntry(path)
-        return e is not None and isinstance(e, _MockFsEntry)
-
-    def _islink(self, path):
-        path = os.path.normpath(path)
-        if path.startswith(resources_path):
-            return self._originals['os.path.islink'](path)
-        return False
-
-    def _getmtime(self, path):
-        path = os.path.normpath(path)
-        if path.startswith(resources_path):
-            return self._originals['os.path.getmtime'](path)
-        e = self._getFsEntry(path)
-        if e is None:
-            raise OSError("No such file: %s" % path)
-        return e.metadata['mtime']
-
-    def _copyfile(self, src, dst):
-        src = os.path.normpath(src)
-        if src.startswith(resources_path):
-            with self._originals['__main__.open'](src, 'r') as fp:
-                src_text = fp.read()
-        else:
-            e = self._getFsEntry(src)
-            src_text = e.contents
-        if not dst.replace('\\', '/').startswith('/' + self.root):
-            raise Exception("Shouldn't copy to: %s" % dst)
-        self._fs._createFile(dst, src_text)
-
-    def _rename(self, src, dst):
-        src = os.path.normpath(src)
-        if src.startswith(resources_path) or dst.startswith(resources_path):
-            raise Exception("Shouldn't rename files in the resources path.")
-        self._copyfile(src, dst)
-        self._remove(src)
-
-    def _rmtree(self, path):
-        if not path.replace('\\', '/').startswith('/' + self.root):
-            raise Exception("Shouldn't delete trees from: %s" % path)
-        e = self._fs._getEntry(os.path.dirname(path))
-        del e[os.path.basename(path)]
-
-    def _getFsEntry(self, path):
-        return self._fs._getEntry(path)
-
--- a/tests/test_baking_baker.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/test_baking_baker.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,7 +1,8 @@
 import time
 import os.path
 import pytest
-from piecrust.baking.baker import PageBaker, Baker
+from piecrust.baking.baker import Baker
+from piecrust.baking.single import PageBaker
 from piecrust.baking.records import BakeRecord
 from .mockutil import get_mock_app, mock_fs, mock_fs_scope
 
@@ -56,6 +57,7 @@
     with mock_fs_scope(fs):
         out_dir = fs.path('kitchen/_counter')
         app = fs.getApp()
+        app.config.set('baker/workers', 1)  # bake with a single worker in tests
         baker = Baker(app, out_dir)
         baker.bake()
         structure = fs.getStructure('kitchen/_counter')
--- a/tests/test_data_assetor.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/test_data_assetor.py	Fri Jun 12 17:09:19 2015 -0700
@@ -5,34 +5,35 @@
 from .mockutil import mock_fs, mock_fs_scope
 
 
-@pytest.mark.parametrize('fs, site_root, expected', [
-        (mock_fs().withPage('pages/foo/bar'), '/', {}),
-        (mock_fs()
+@pytest.mark.parametrize('fs_fac, site_root, expected', [
+        (lambda: mock_fs().withPage('pages/foo/bar'), '/', {}),
+        (lambda: mock_fs()
             .withPage('pages/foo/bar')
             .withPageAsset('pages/foo/bar', 'one.txt', 'one'),
             '/',
             {'one': 'one'}),
-        (mock_fs()
+        (lambda: mock_fs()
             .withPage('pages/foo/bar')
             .withPageAsset('pages/foo/bar', 'one.txt', 'one')
             .withPageAsset('pages/foo/bar', 'two.txt', 'two'),
             '/',
             {'one': 'one', 'two': 'two'}),
 
-        (mock_fs().withPage('pages/foo/bar'), '/whatever', {}),
-        (mock_fs()
+        (lambda: mock_fs().withPage('pages/foo/bar'), '/whatever', {}),
+        (lambda: mock_fs()
             .withPage('pages/foo/bar')
             .withPageAsset('pages/foo/bar', 'one.txt', 'one'),
             '/whatever',
             {'one': 'one'}),
-        (mock_fs()
+        (lambda: mock_fs()
             .withPage('pages/foo/bar')
             .withPageAsset('pages/foo/bar', 'one.txt', 'one')
             .withPageAsset('pages/foo/bar', 'two.txt', 'two'),
             '/whatever',
             {'one': 'one', 'two': 'two'})
         ])
-def test_assets(fs, site_root, expected):
+def test_assets(fs_fac, site_root, expected):
+    fs = fs_fac()  # built lazily now that the fs writes real files on disk
     fs.withConfig({'site': {'root': site_root}})
     with mock_fs_scope(fs):
         page = MagicMock()
--- a/tests/test_data_linker.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/test_data_linker.py	Fri Jun 12 17:09:19 2015 -0700
@@ -5,17 +5,17 @@
 
 
 @pytest.mark.parametrize(
-    'fs, page_path, expected',
+    'fs_fac, page_path, expected',
     [
-        (mock_fs().withPage('pages/foo'), 'foo.md',
+        (lambda: mock_fs().withPage('pages/foo'), 'foo.md',
             # is_dir, name, is_self, data
             [(False, 'foo', True, '/foo')]),
-        ((mock_fs()
+        ((lambda: mock_fs()
                 .withPage('pages/foo')
                 .withPage('pages/bar')),
             'foo.md',
             [(False, 'bar', False, '/bar'), (False, 'foo', True, '/foo')]),
-        ((mock_fs()
+        ((lambda: mock_fs()
                 .withPage('pages/baz')
                 .withPage('pages/something')
                 .withPage('pages/something/else')
@@ -26,7 +26,7 @@
                 (False, 'baz', False, '/baz'),
                 (False, 'foo', True, '/foo'),
                 (True, 'something', False, '/something')]),
-        ((mock_fs()
+        ((lambda: mock_fs()
                 .withPage('pages/something/else')
                 .withPage('pages/foo')
                 .withPage('pages/something/good')
@@ -35,7 +35,8 @@
             [(False, 'else', True, '/something/else'),
                 (False, 'good', False, '/something/good')])
     ])
-def test_linker_iteration(fs, page_path, expected):
+def test_linker_iteration(fs_fac, page_path, expected):
+    fs = fs_fac()
     with mock_fs_scope(fs):
         app = fs.getApp()
         app.config.set('site/pretty_urls', True)
@@ -54,16 +55,16 @@
 
 
 @pytest.mark.parametrize(
-        'fs, page_path, expected',
+        'fs_fac, page_path, expected',
         [
-            (mock_fs().withPage('pages/foo'), 'foo.md',
+            (lambda: mock_fs().withPage('pages/foo'), 'foo.md',
                 [('/foo', True)]),
-            ((mock_fs()
+            ((lambda: mock_fs()
                     .withPage('pages/foo')
                     .withPage('pages/bar')),
                 'foo.md',
                 [('/bar', False), ('/foo', True)]),
-            ((mock_fs()
+            ((lambda: mock_fs()
                     .withPage('pages/baz')
                     .withPage('pages/something/else')
                     .withPage('pages/foo')
@@ -71,7 +72,7 @@
                 'foo.md',
                 [('/bar', False), ('/baz', False),
                     ('/foo', True), ('/something/else', False)]),
-            ((mock_fs()
+            ((lambda: mock_fs()
                     .withPage('pages/something/else')
                     .withPage('pages/foo')
                     .withPage('pages/something/good')
@@ -80,7 +81,8 @@
                 [('/something/else', True),
                     ('/something/good', False)])
         ])
-def test_recursive_linker_iteration(fs, page_path, expected):
+def test_recursive_linker_iteration(fs_fac, page_path, expected):
+    fs = fs_fac()
     with mock_fs_scope(fs):
         app = fs.getApp()
         app.config.set('site/pretty_urls', True)
--- a/tests/test_processing_base.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/test_processing_base.py	Fri Jun 12 17:09:19 2015 -0700
@@ -77,10 +77,12 @@
         mtime = os.path.getmtime(fs.path('/counter/blah.foo'))
         assert abs(time.time() - mtime) <= 2
 
+        time.sleep(1)  # let the on-disk mtime tick over
         pp.run()
         assert expected == fs.getStructure('counter')
         assert mtime == os.path.getmtime(fs.path('/counter/blah.foo'))
 
+        time.sleep(1)
         fs.withFile('kitchen/assets/blah.foo', 'A new test file.')
         pp.run()
         expected = {'blah.foo': 'A new test file.'}
@@ -101,10 +103,12 @@
         mtime = os.path.getmtime(fs.path('/counter/blah.bar'))
         assert abs(time.time() - mtime) <= 2
 
+        time.sleep(1)  # let the on-disk mtime tick over
         pp.run()
         assert expected == fs.getStructure('counter')
         assert mtime == os.path.getmtime(fs.path('/counter/blah.bar'))
 
+        time.sleep(1)
         fs.withFile('kitchen/assets/blah.foo', 'A new test file.')
         pp.run()
         expected = {'blah.bar': 'FOO: A new test file.'}
@@ -126,6 +130,7 @@
         pp.run()
         assert expected == fs.getStructure('counter')
 
+        time.sleep(1)  # let the on-disk mtime tick over
         os.remove(fs.path('/kitchen/assets/blah2.foo'))
         expected = {
                 'blah1.foo': 'A test file.'}
--- a/tests/test_sources_autoconfig.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/test_sources_autoconfig.py	Fri Jun 12 17:09:19 2015 -0700
@@ -5,49 +5,49 @@
 
 
 @pytest.mark.parametrize(
-        'fs, src_config, expected_paths, expected_metadata',
+        'fs_fac, src_config, expected_paths, expected_metadata',
         [
-            (mock_fs(), {}, [], []),
-            (mock_fs().withPage('test/_index.md'),
+            (lambda: mock_fs(), {}, [], []),
+            (lambda: mock_fs().withPage('test/_index.md'),
                 {},
                 ['_index.md'],
                 [{'slug': '', 'config': {'foo': []}}]),
-            (mock_fs().withPage('test/something.md'),
+            (lambda: mock_fs().withPage('test/something.md'),
                 {},
                 ['something.md'],
                 [{'slug': 'something', 'config': {'foo': []}}]),
-            (mock_fs().withPage('test/bar/something.md'),
+            (lambda: mock_fs().withPage('test/bar/something.md'),
                 {},
                 ['bar/something.md'],
                 [{'slug': 'something', 'config': {'foo': ['bar']}}]),
-            (mock_fs().withPage('test/bar1/bar2/something.md'),
+            (lambda: mock_fs().withPage('test/bar1/bar2/something.md'),
                 {},
                 ['bar1/bar2/something.md'],
                 [{'slug': 'something', 'config': {'foo': ['bar1', 'bar2']}}]),
 
-            (mock_fs().withPage('test/something.md'),
+            (lambda: mock_fs().withPage('test/something.md'),
                 {'collapse_single_values': True},
                 ['something.md'],
                 [{'slug': 'something', 'config': {'foo': None}}]),
-            (mock_fs().withPage('test/bar/something.md'),
+            (lambda: mock_fs().withPage('test/bar/something.md'),
                 {'collapse_single_values': True},
                 ['bar/something.md'],
                 [{'slug': 'something', 'config': {'foo': 'bar'}}]),
-            (mock_fs().withPage('test/bar1/bar2/something.md'),
+            (lambda: mock_fs().withPage('test/bar1/bar2/something.md'),
                 {'collapse_single_values': True},
                 ['bar1/bar2/something.md'],
                 [{'slug': 'something', 'config': {'foo': ['bar1', 'bar2']}}]),
 
-            (mock_fs().withPage('test/something.md'),
+            (lambda: mock_fs().withPage('test/something.md'),
                 {'only_single_values': True},
                 ['something.md'],
                 [{'slug': 'something', 'config': {'foo': None}}]),
-            (mock_fs().withPage('test/bar/something.md'),
+            (lambda: mock_fs().withPage('test/bar/something.md'),
                 {'only_single_values': True},
                 ['bar/something.md'],
                 [{'slug': 'something', 'config': {'foo': 'bar'}}]),
             ])
-def test_autoconfig_source_factories(fs, src_config, expected_paths,
+def test_autoconfig_source_factories(fs_fac, src_config, expected_paths,
                                      expected_metadata):
     site_config = {
             'sources': {
@@ -58,6 +58,7 @@
                 {'url': '/%slug%', 'source': 'test'}]
             }
     site_config['sources']['test'].update(src_config)
+    fs = fs_fac()
     fs.withConfig({'site': site_config})
     fs.withDir('kitchen/test')
     with mock_fs_scope(fs):
@@ -88,27 +89,27 @@
 
 
 @pytest.mark.parametrize(
-        'fs, expected_paths, expected_metadata',
+        'fs_fac, expected_paths, expected_metadata',
         [
-            (mock_fs(), [], []),
-            (mock_fs().withPage('test/_index.md'),
+            (lambda: mock_fs(), [], []),
+            (lambda: mock_fs().withPage('test/_index.md'),
                 ['_index.md'],
                 [{'slug': '',
                     'config': {'foo': 0, 'foo_trail': [0]}}]),
-            (mock_fs().withPage('test/something.md'),
+            (lambda: mock_fs().withPage('test/something.md'),
                 ['something.md'],
                 [{'slug': 'something',
                     'config': {'foo': 0, 'foo_trail': [0]}}]),
-            (mock_fs().withPage('test/08_something.md'),
+            (lambda: mock_fs().withPage('test/08_something.md'),
                 ['08_something.md'],
                 [{'slug': 'something',
                     'config': {'foo': 8, 'foo_trail': [8]}}]),
-            (mock_fs().withPage('test/02_there/08_something.md'),
+            (lambda: mock_fs().withPage('test/02_there/08_something.md'),
                 ['02_there/08_something.md'],
                 [{'slug': 'there/something',
                     'config': {'foo': 8, 'foo_trail': [2, 8]}}]),
             ])
-def test_ordered_source_factories(fs, expected_paths, expected_metadata):
+def test_ordered_source_factories(fs_fac, expected_paths, expected_metadata):
     site_config = {
             'sources': {
                 'test': {'type': 'ordered',
@@ -117,6 +118,7 @@
             'routes': [
                 {'url': '/%slug%', 'source': 'test'}]
             }
+    fs = fs_fac()
     fs.withConfig({'site': site_config})
     fs.withDir('kitchen/test')
     with mock_fs_scope(fs):
@@ -130,34 +132,34 @@
 
 
 @pytest.mark.parametrize(
-        'fs, route_path, expected_path, expected_metadata',
+        'fs_fac, route_path, expected_path, expected_metadata',
         [
-            (mock_fs(), 'missing', None, None),
-            (mock_fs().withPage('test/something.md'),
+            (lambda: mock_fs(), 'missing', None, None),
+            (lambda: mock_fs().withPage('test/something.md'),
                 'something', 'something.md',
                 {'slug': 'something',
                     'config': {'foo': 0, 'foo_trail': [0]}}),
-            (mock_fs().withPage('test/bar/something.md'),
+            (lambda: mock_fs().withPage('test/bar/something.md'),
                 'bar/something', 'bar/something.md',
                 {'slug': 'bar/something',
                     'config': {'foo': 0, 'foo_trail': [0]}}),
-            (mock_fs().withPage('test/42_something.md'),
+            (lambda: mock_fs().withPage('test/42_something.md'),
                 'something', '42_something.md',
                 {'slug': 'something',
                     'config': {'foo': 42, 'foo_trail': [42]}}),
-            (mock_fs().withPage('test/bar/42_something.md'),
+            (lambda: mock_fs().withPage('test/bar/42_something.md'),
                 'bar/something', 'bar/42_something.md',
                 {'slug': 'bar/something',
                     'config': {'foo': 42, 'foo_trail': [42]}}),
 
-            ((mock_fs()
+            ((lambda: mock_fs()
                 .withPage('test/42_something.md')
                 .withPage('test/43_other_something.md')),
                 'something', '42_something.md',
                 {'slug': 'something',
                     'config': {'foo': 42, 'foo_trail': [42]}}),
             ])
-def test_ordered_source_find(fs, route_path, expected_path,
+def test_ordered_source_find(fs_fac, route_path, expected_path,
                              expected_metadata):
     site_config = {
             'sources': {
@@ -167,6 +169,7 @@
             'routes': [
                 {'url': '/%slug%', 'source': 'test'}]
             }
+    fs = fs_fac()
     fs.withConfig({'site': site_config})
     fs.withDir('kitchen/test')
     with mock_fs_scope(fs):
--- a/tests/test_sources_base.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/test_sources_base.py	Fri Jun 12 17:09:19 2015 -0700
@@ -6,22 +6,23 @@
 from .pathutil import slashfix
 
 
-@pytest.mark.parametrize('fs, expected_paths, expected_slugs', [
-        (mock_fs(), [], []),
-        (mock_fs().withPage('test/foo.html'),
+@pytest.mark.parametrize('fs_fac, expected_paths, expected_slugs', [
+        (lambda: mock_fs(), [], []),
+        (lambda: mock_fs().withPage('test/foo.html'),
             ['foo.html'], ['foo']),
-        (mock_fs().withPage('test/foo.md'),
+        (lambda: mock_fs().withPage('test/foo.md'),
             ['foo.md'], ['foo']),
-        (mock_fs().withPage('test/foo.ext'),
+        (lambda: mock_fs().withPage('test/foo.ext'),
             ['foo.ext'], ['foo.ext']),
-        (mock_fs().withPage('test/foo/bar.html'),
+        (lambda: mock_fs().withPage('test/foo/bar.html'),
             ['foo/bar.html'], ['foo/bar']),
-        (mock_fs().withPage('test/foo/bar.md'),
+        (lambda: mock_fs().withPage('test/foo/bar.md'),
             ['foo/bar.md'], ['foo/bar']),
-        (mock_fs().withPage('test/foo/bar.ext'),
+        (lambda: mock_fs().withPage('test/foo/bar.ext'),
             ['foo/bar.ext'], ['foo/bar.ext']),
         ])
-def test_default_source_factories(fs, expected_paths, expected_slugs):
+def test_default_source_factories(fs_fac, expected_paths, expected_slugs):
+    fs = fs_fac()
     fs.withConfig({
         'site': {
             'sources': {
@@ -131,7 +132,7 @@
         app = fs.getApp()
         r = PageRef(app, 'whatever:doesnt_exist.md')
         with pytest.raises(Exception):
-            r.possible_rel_paths
+            r.possible_ref_specs
 
 
 def test_page_ref_with_missing_file():
@@ -139,9 +140,11 @@
     with mock_fs_scope(fs):
         app = fs.getApp()
         r = PageRef(app, 'pages:doesnt_exist.%ext%')
-        assert r.possible_rel_paths == [
-                'doesnt_exist.html', 'doesnt_exist.md', 'doesnt_exist.textile']
-        assert r.source_name == 'pages'
+        assert r.possible_ref_specs == [
+                'pages:doesnt_exist.html', 'pages:doesnt_exist.md',
+                'pages:doesnt_exist.textile']
+        with pytest.raises(PageNotFoundError):
+            r.source_name
         with pytest.raises(PageNotFoundError):
             r.rel_path
         with pytest.raises(PageNotFoundError):
--- a/tests/test_sources_posts.py	Sat May 30 15:41:52 2015 -0700
+++ b/tests/test_sources_posts.py	Fri Jun 12 17:09:19 2015 -0700
@@ -1,44 +1,46 @@
-import os
 import pytest
 from .mockutil import mock_fs, mock_fs_scope
 
 
-@pytest.mark.parametrize('fs, src_type, expected_paths, expected_metadata', [
-        (mock_fs(), 'flat', [], []),
-        (mock_fs().withPage('test/2014-01-01_foo.md'),
+@pytest.mark.parametrize(
+        'fs_fac, src_type, expected_paths, expected_metadata', [
+        (lambda: mock_fs(), 'flat', [], []),
+        (lambda: mock_fs().withPage('test/2014-01-01_foo.md'),
             'flat',
             ['2014-01-01_foo.md'],
             [(2014, 1, 1, 'foo')]),
-        (mock_fs(), 'shallow', [], []),
-        (mock_fs().withPage('test/2014/01-01_foo.md'),
+        (lambda: mock_fs(), 'shallow', [], []),
+        (lambda: mock_fs().withPage('test/2014/01-01_foo.md'),
             'shallow',
             ['2014/01-01_foo.md'],
             [(2014, 1, 1, 'foo')]),
-        (mock_fs(), 'hierarchy', [], []),
-        (mock_fs().withPage('test/2014/01/01_foo.md'),
+        (lambda: mock_fs(), 'hierarchy', [], []),
+        (lambda: mock_fs().withPage('test/2014/01/01_foo.md'),
             'hierarchy',
             ['2014/01/01_foo.md'],
             [(2014, 1, 1, 'foo')]),
         ])
-def test_post_source_factories(fs, src_type, expected_paths, expected_metadata):
-        fs.withConfig({
-            'site': {
-                'sources': {
-                    'test': {'type': 'posts/%s' % src_type}},
-                'routes': [
-                    {'url': '/%slug%', 'source': 'test'}]
-                }
-            })
-        fs.withDir('kitchen/test')
-        with mock_fs_scope(fs):
-            app = fs.getApp(cache=False)
-            s = app.getSource('test')
-            facs = list(s.buildPageFactories())
-            paths = [f.rel_path for f in facs]
-            assert paths == expected_paths
-            metadata = [
-                    (f.metadata['year'], f.metadata['month'],
-                        f.metadata['day'], f.metadata['slug'])
-                    for f in facs]
-            assert metadata == expected_metadata
+def test_post_source_factories(fs_fac, src_type, expected_paths,
+                               expected_metadata):
+    fs = fs_fac()
+    fs.withConfig({
+        'site': {
+            'sources': {
+                'test': {'type': 'posts/%s' % src_type}},
+            'routes': [
+                {'url': '/%slug%', 'source': 'test'}]
+            }
+        })
+    fs.withDir('kitchen/test')
+    with mock_fs_scope(fs):
+        app = fs.getApp(cache=False)
+        s = app.getSource('test')
+        facs = list(s.buildPageFactories())
+        paths = [f.rel_path for f in facs]
+        assert paths == expected_paths
+        metadata = [
+                (f.metadata['year'], f.metadata['month'],
+                    f.metadata['day'], f.metadata['slug'])
+                for f in facs]
+        assert metadata == expected_metadata
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/tmpfs.py	Fri Jun 12 17:09:19 2015 -0700
@@ -0,0 +1,92 @@
+import os
+import os.path
+import shutil
+import random
+from .basefs import TestFileSystemBase
+
+
+class TempDirFileSystem(TestFileSystemBase):
+    def __init__(self, default_spec=True):
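+        # Build under a real directory next to the tests so that spawned
+        # worker processes can see the same files as the test process.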
+        self._root = os.path.join(
+                os.path.dirname(__file__),
+                '__tmpfs__',
+                '%d' % random.randrange(1000))
+        self._done = False
+        if default_spec:
+            self._initDefaultSpec()
+
+    def path(self, p):
+        if p in ('/', '', None):
+            # Asking for the root itself, as the old mock_fs allowed.
+            return self._root
+        p = p.lstrip('/\\')
+        return os.path.join(self._root, p)
+
+    def getStructure(self, path=None):
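+        # Return the directory tree as nested dicts of file contents,
+        # mirroring what the old in-memory mock_fs returned.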
+        path = self.path(path)
+        if not os.path.exists(path):
+            raise Exception("No such path: %s" % path)
+        if not os.path.isdir(path):
+            raise Exception("Path is not a directory: %s" % path)
+
+        res = {}
+        for item in os.listdir(path):
+            self._getStructureRecursive(res, path, item)
+        return res
+
+    def getFileEntry(self, path):
+        path = self.path(path)
+        with open(path, 'r', encoding='utf8') as fp:
+            return fp.read()
+
+    def _getStructureRecursive(self, target, parent, cur):
+        full_cur = os.path.join(parent, cur)
+        if os.path.isdir(full_cur):
+            e = {}
+            for item in os.listdir(full_cur):
+                self._getStructureRecursive(e, full_cur, item)
+            target[cur] = e
+        else:
+            with open(full_cur, 'r', encoding='utf8') as fp:
+                target[cur] = fp.read()
+
+    def _createDir(self, path):
+        if not os.path.exists(path):
+            os.makedirs(path)
+
+    def _createFile(self, path, contents):
+        dirpath = os.path.dirname(path)
+        if not os.path.exists(dirpath):
+            os.makedirs(dirpath)
+        with open(path, 'w', encoding='utf8') as fp:
+            fp.write(contents)
+
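+        # On the first write, leave a breadcrumb recording the creating
+        # stack trace, so stray '__tmpfs__' folders can be traced to a test.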
+        if not self._done:
+            import traceback
+            with open(os.path.join(self._root, 'where.txt'), 'w') as fp:
+                fp.write('\n'.join(traceback.format_stack(limit=10)))
+            self._done = True
+
+
+class TempDirScope(object):
+    def __init__(self, fs, open_patches=None):
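+        # `open_patches` is accepted only for compatibility with the old
+        # `mock_fs_scope` signature; a real file-system needs no patching.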
+        self._fs = fs
+
+    @property
+    def root(self):
+        return self._fs._root
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_tb):
+        shutil.rmtree(self.root)
+