changeset 570:7dabfdd056a1

serve: Fix corner cases where the pipeline doesn't run correctly.

* When `chef serve` is run before the `assets` folder is created, watch for
  that folder to appear and rebuild the pipeline when it does.
* Do the same when the `config.yml` file changes.
author Ludovic Chabant <ludovic@chabant.com>
date Sat, 31 Oct 2015 00:03:32 -0700
parents 34e57d4b97e2
children 76f0118276d7
files piecrust/processing/pipeline.py piecrust/serving/procloop.py
diffstat 2 files changed, 74 insertions(+), 29 deletions(-) [+]
--- a/piecrust/processing/pipeline.py	Tue Oct 13 23:07:56 2015 -0700
+++ b/piecrust/processing/pipeline.py	Sat Oct 31 00:03:32 2015 -0700
@@ -39,10 +39,10 @@
             tmp_dir = os.path.join(tempfile.gettempdir(), 'piecrust')
         self.tmp_dir = os.path.join(tmp_dir, 'proc')
 
-        baker_params = app.config.get('baker') or {}
+        baker_params = app.config.get('baker', {})
 
-        assets_dirs = baker_params.get('assets_dirs', app.assets_dirs)
-        self.mounts = make_mount_infos(assets_dirs, self.app.root_dir)
+        mount_params = baker_params.get('assets_dirs', {})
+        self.mounts = make_mount_infos(app, mount_params)
 
         self.num_workers = baker_params.get(
                 'workers', multiprocessing.cpu_count())
@@ -193,14 +193,13 @@
         if src_dir_or_file is not None:
             # Process only the given path.
             # Find out what mount point this is in.
-            for name, info in self.mounts.items():
-                path = info['path']
+            for path, info in self.mounts.items():
                 if src_dir_or_file[:len(path)] == path:
                     base_dir = path
                     mount_info = info
                     break
             else:
-                known_roots = [i['path'] for i in self.mounts.values()]
+                known_roots = list(self.mounts.keys())
                 raise Exception("Input path '%s' is not part of any known "
                                 "mount point: %s" %
                                 (src_dir_or_file, known_roots))
@@ -215,8 +214,7 @@
 
         else:
             # Process everything.
-            for name, info in self.mounts.items():
-                path = info['path']
+            for path, info in self.mounts.items():
                 ctx = _ProcessingContext(jobs, record, path, info)
                 logger.debug("Initiating processing pipeline on: %s" % path)
                 self._processDirectory(ctx, path)
@@ -267,16 +265,23 @@
         return pool
 
 
-def make_mount_infos(mounts, root_dir):
-    if isinstance(mounts, list):
-        mounts = {m: {} for m in mounts}
+def make_mount_infos(app, mount_params):
+    mounts = {d: {} for d in app.assets_dirs}
+
+    for name, cfg in mount_params.items():
+        mdir = os.path.join(app.root_dir, name)
+        mounts[mdir] = cfg
 
-    for name, info in mounts.items():
-        if not isinstance(info, dict):
-            raise Exception("Asset directory info for '%s' is not a "
-                            "dictionary." % name)
+    for mdir, info in mounts.items():
+        mname = os.path.basename(mdir)
+        info_from_config = mount_params.get(mname)
+        if info_from_config is not None:
+            if not isinstance(info, dict):
+                raise Exception("Asset directory info for '%s' is not a "
+                                "dictionary." % mname)
+            info.update(info_from_config)
         info.setdefault('processors', 'all -uglifyjs -cleancss')
-        info['path'] = os.path.join(root_dir, name)
+        info['name'] = mname
 
     return mounts
 
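The reworked `make_mount_infos()` now keys the mounts dictionary by absolute directory path rather than by name, merges in any per-directory settings from the `baker/assets_dirs` config section, and records each directory's basename under `name`. Below is a minimal sketch of the resulting structure, assuming a hypothetical site root of `/my/site` and an illustrative `theme_assets` entry (neither appears in this changeset), and assuming `app.assets_dirs` yields absolute paths, as the path comparisons in `procloop.py` suggest:

    # What make_mount_infos(app, mount_params) would return, assuming
    # app.assets_dirs == ['/my/site/assets'] and mount_params (taken from the
    # 'baker/assets_dirs' config) == {'theme_assets': {'processors': 'all -sass'}}.
    # Both the site root and the 'theme_assets' entry are hypothetical.
    mounts = {
        '/my/site/assets': {
            'processors': 'all -uglifyjs -cleancss',  # default from setdefault()
            'name': 'assets',
        },
        '/my/site/theme_assets': {
            'processors': 'all -sass',  # taken from the config entry
            'name': 'theme_assets',
        },
    }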
--- a/piecrust/serving/procloop.py	Tue Oct 13 23:07:56 2015 -0700
+++ b/piecrust/serving/procloop.py	Sat Oct 31 00:03:32 2015 -0700
@@ -77,16 +77,20 @@
     def __init__(self, root_dir, out_dir, sub_cache_dir=None, debug=False):
         super(ProcessingLoop, self).__init__(
                 name='pipeline-reloader', daemon=True)
-        # TODO: re-create the app when `config.yml` is changed.
-        self.app = PieCrust(root_dir=root_dir, debug=debug)
-        if sub_cache_dir:
-            self.app._useSubCacheDir(sub_cache_dir)
-        self.pipeline = ProcessorPipeline(self.app, out_dir)
+        self.root_dir = root_dir
+        self.out_dir = out_dir
+        self.sub_cache_dir = sub_cache_dir
+        self.debug = debug
         self.last_status_id = 0
         self.interval = 1
+        self.app = None
+        self._roots = []
+        self._monitor_assets_root = False
         self._paths = set()
+        self._config_path = os.path.join(root_dir, 'config.yml')
         self._record = None
         self._last_bake = 0
+        self._last_config_mtime = 0
         self._obs = []
         self._obs_lock = threading.Lock()
 
@@ -99,18 +103,32 @@
             self._obs.remove(obs)
 
     def run(self):
-        # Build the first list of known files and run the pipeline once.
-        roots = [os.path.join(self.app.root_dir, r)
-                 for r in self.pipeline.mounts.keys()]
-        for root in roots:
-            for dirpath, dirnames, filenames in os.walk(root):
-                self._paths |= set([os.path.join(dirpath, f)
-                                    for f in filenames])
+        self._initPipeline()
+
         self._last_bake = time.time()
+        self._last_config_mtime = os.path.getmtime(self._config_path)
         self._record = self.pipeline.run()
 
         while True:
-            for root in roots:
+            cur_config_time = os.path.getmtime(self._config_path)
+            if self._last_config_mtime < cur_config_time:
+                logger.info("Site configuration changed, reloading pipeline.")
+                self._last_config_mtime = cur_config_time
+                self._initPipeline()
+                for root in self._roots:
+                    self._runPipeline(root)
+                continue
+
+            if self._monitor_assets_root:
+                assets_dir = os.path.join(self.app.root_dir, 'assets')
+                if os.path.isdir(assets_dir):
+                    logger.info("Assets directory was created, reloading "
+                                "pipeline.")
+                    self._initPipeline()
+                    self._runPipeline(assets_dir)
+                    continue
+
+            for root in self._roots:
                 # For each mount root we try to find the first new or
                 # modified file. If any, we just run the pipeline on
                 # that mount.
@@ -136,6 +154,28 @@
 
             time.sleep(self.interval)
 
+    def _initPipeline(self):
+        # Create the app and pipeline.
+        self.app = PieCrust(root_dir=self.root_dir, debug=self.debug)
+        if self.sub_cache_dir:
+            self.app._useSubCacheDir(self.sub_cache_dir)
+        self.pipeline = ProcessorPipeline(self.app, self.out_dir)
+
+        # Get the list of assets directories.
+        self._roots = list(self.pipeline.mounts.keys())
+
+        # The 'assets' folder may not be in the mounts list if it doesn't
+        # exist yet, but we want to monitor for when the user creates it.
+        default_root = os.path.join(self.app.root_dir, 'assets')
+        self._monitor_assets_root = (default_root not in self._roots)
+
+        # Build the list of initial asset files.
+        self._paths = set()
+        for root in self._roots:
+            for dirpath, dirnames, filenames in os.walk(root):
+                self._paths |= set([os.path.join(dirpath, f)
+                                    for f in filenames])
+
     def _runPipeline(self, root):
         self._last_bake = time.time()
         try:
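The serving loop now re-creates the application and pipeline via `_initPipeline()` instead of building them once in the constructor, and the main loop polls two extra conditions before scanning the mount roots. Below is a minimal, self-contained sketch of that polling idea, where a `rebuild()` callback stands in for re-initializing and re-running the pipeline (the function and callback names are placeholders, not part of this changeset):

    import os
    import time

    def watch_site(root_dir, rebuild, interval=1):
        # Poll for the two corner cases handled by ProcessingLoop.run().
        config_path = os.path.join(root_dir, 'config.yml')
        assets_dir = os.path.join(root_dir, 'assets')
        last_config_mtime = os.path.getmtime(config_path)
        monitor_assets = not os.path.isdir(assets_dir)

        while True:
            # Re-run everything when the site configuration is touched.
            cur_mtime = os.path.getmtime(config_path)
            if cur_mtime > last_config_mtime:
                last_config_mtime = cur_mtime
                rebuild('config.yml changed')
            # Re-run when the default assets folder finally shows up.
            elif monitor_assets and os.path.isdir(assets_dir):
                monitor_assets = False
                rebuild('assets folder created')
            time.sleep(interval)

Polling keeps the loop free of extra dependencies; in the changeset itself the reload also rebuilds the list of known asset files, so newly created files are picked up after the pipeline is re-initialized.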