piecrust2: view piecrust/publishing/copy.py @ 1136:5f97b5b59dfe
bake: Optimize cache handling for the baking process.
- Get rid of the 2-level pipeline runs... handle a single set of passes.
- Go back to load/render segments/layout passes for pages.
- Add descriptions of what each job batch does.
- Improve the taxonomy pipeline so it doesn't re-bake terms that don't need
to be re-baked.
- Simplify some of the code.
author    Ludovic Chabant <ludovic@chabant.com>
date      Mon, 23 Apr 2018 21:47:49 -0700
parents   13e8b50a2113
children
line source
import os
import os.path
import shutil
import logging
from piecrust.publishing.base import Publisher


logger = logging.getLogger(__name__)


class CopyPublisher(Publisher):
    PUBLISHER_NAME = 'copy'
    PUBLISHER_SCHEME = 'file'

    def parseUrlTarget(self, url):
        # The target URL's netloc and path together form the output
        # directory to copy the bake output into.
        self.config = {'output': (url.netloc + url.path)}

    def run(self, ctx):
        dest = self.config.get('output')

        if ctx.was_baked:
            to_upload = list(self.getBakedFiles(ctx))
            to_delete = list(self.getDeletedFiles(ctx))
            if to_upload or to_delete:
                logger.info("Copying new/changed files...")
                for path in to_upload:
                    rel_path = os.path.relpath(path, ctx.bake_out_dir)
                    dest_path = os.path.join(dest, rel_path)
                    dest_dir = os.path.dirname(dest_path)
                    os.makedirs(dest_dir, exist_ok=True)
                    try:
                        dest_mtime = os.path.getmtime(dest_path)
                    except OSError:
                        dest_mtime = 0
                    # Only copy files at least as new as the existing copy.
                    if os.path.getmtime(path) >= dest_mtime:
                        logger.info(rel_path)
                        if not ctx.preview:
                            shutil.copyfile(path, dest_path)

                logger.info("Deleting removed files...")
                for path in self.getDeletedFiles(ctx):
                    rel_path = os.path.relpath(path, ctx.bake_out_dir)
                    logger.info("%s [DELETE]" % rel_path)
                    if not ctx.preview:
                        # Best-effort removal; ignore already-missing files.
                        try:
                            os.remove(path)
                        except OSError:
                            pass
            else:
                logger.info("Nothing to copy to the output folder.")
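
The snippet below is a small, self-contained sketch (not part of the file above) showing how a file:// publish target could map to the 'output' directory used by run(). It assumes the caller hands parseUrlTarget a parsed URL exposing .netloc and .path attributes, as urllib.parse.urlsplit produces; the actual piecrust caller is not shown here.

    from urllib.parse import urlsplit

    # Illustrative only: concatenating netloc and path reproduces what
    # parseUrlTarget stores as the 'output' directory for CopyPublisher.
    for target in ('file:///var/www/mysite', 'file://host/share/mysite'):
        url = urlsplit(target)
        print(target, '->', url.netloc + url.path)
        # file:///var/www/mysite -> /var/www/mysite
        # file://host/share/mysite -> host/share/mysite

Note the design of run(): files are only re-copied when the baked file's mtime is at least that of the existing destination copy, deletions are best-effort (OSError is ignored), and a preview run only logs what would be copied or deleted without touching the output directory.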