changeset 303:6bd9d44fc535

Wiki updater cleanup and improvements.

* Less confusing API on `WikiParameters`.
* Less confusing setup when running the Flask app.
* When running locally and the user edits a page, only resolve that page and mark any other including/querying page as invalidated; those pages are re-resolved lazily, on demand.
* Add the ability for the DB to return the resolved state of pages.
author Ludovic Chabant <ludovic@chabant.com>
date Wed, 01 Oct 2014 08:19:03 -0700
parents 15d317805c58
children 11d9d8ae8696
files wikked/commands/web.py wikked/db/base.py wikked/db/sql.py wikked/page.py wikked/views/__init__.py wikked/web.py wikked/wiki.py
diffstat 7 files changed, 53 insertions(+), 54 deletions(-)
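The central API change in this commit is that a wiki updater is now a plain callable assigned to `WikiParameters.wiki_updater`, called with both the wiki and the URL of the page that was just saved (previously it came from overriding `getWikiUpdater()` and took only the wiki). A minimal sketch of plugging in a custom updater, assuming a hypothetical wiki root path:

    from wikked.wiki import WikiParameters

    def my_updater(wiki, url):
        # 'url' is the page that was just saved and resolved; flag every
        # other page that needs invalidation so it is re-resolved lazily.
        wiki.db.uncachePages(except_url=url, only_required=True)

    params = WikiParameters('/path/to/wiki')  # hypothetical root path
    params.wiki_updater = my_updater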
--- a/wikked/commands/web.py	Mon Sep 29 13:52:08 2014 -0700
+++ b/wikked/commands/web.py	Wed Oct 01 08:19:03 2014 -0700
@@ -8,6 +8,10 @@
 logger = logging.getLogger(__name__)
 
 
+def autoreload_wiki_updater(wiki, url):
+    wiki.db.uncachePages(except_url=url, only_required=True)
+
+
 @register_command
 class RunServerCommand(WikkedCommand):
     def __init__(self):
@@ -74,8 +78,9 @@
             app.config['DEV_ASSETS'] = True
         if not ctx.args.noupdate:
             app.config['WIKI_AUTO_RELOAD'] = True
+            ctx.params.wiki_updater = autoreload_wiki_updater
 
-        app.set_wiki_params(ctx.params)
+        app.wiki_params = ctx.params
         if bool(app.config.get('UPDATE_WIKI_ON_START')):
             ctx.wiki.updateAll()
 
--- a/wikked/db/base.py	Mon Sep 29 13:52:08 2014 -0700
+++ b/wikked/db/base.py	Wed Oct 01 08:19:03 2014 -0700
@@ -4,9 +4,6 @@
 class Database(object):
     """ The base class for a database cache.
     """
-    def __init__(self):
-        pass
-
     def start(self, wiki):
         pass
 
@@ -19,7 +16,7 @@
     def close(self, commit, exception):
         pass
 
-    def reset(self, page_infos, page_factory):
+    def reset(self, page_infos):
         pass
 
     def updatePage(self, page_info):
@@ -53,6 +50,9 @@
     def cachePage(self, page):
         pass
 
+    def uncachePages(self, except_url=None, only_required=False):
+        pass
+
     def pageExists(self, url=None, path=None):
         raise NotImplementedError()
 
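Database backends other than the SQL one would override this new hook as well. A minimal in-memory sketch of the expected semantics (hypothetical backend, only the new method shown):

    from wikked.db.base import Database

    class InMemoryDatabase(Database):
        def __init__(self):
            self.pages = {}  # url -> cached page entry (hypothetical)

        def uncachePages(self, except_url=None, only_required=False):
            # Mark pages as not ready so they are re-resolved on demand.
            for url, entry in self.pages.items():
                if except_url is not None and url == except_url:
                    continue
                if only_required and not entry.needs_invalidate:
                    continue
                entry.is_ready = False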
--- a/wikked/db/sql.py	Mon Sep 29 13:52:08 2014 -0700
+++ b/wikked/db/sql.py	Wed Oct 01 08:19:03 2014 -0700
@@ -211,11 +211,11 @@
                 self._session.commit()
             self._session.remove()
 
-    def reset(self, page_infos, page_factory):
+    def reset(self, page_infos):
         logger.debug("Re-creating SQL database.")
         self._createSchema()
         for pi in page_infos:
-            page = page_factory(pi)
+            page = FileSystemPage(self.wiki, pi)
             self._addPage(page)
         self.session.commit()
 
@@ -237,17 +237,7 @@
             self.session.commit()
 
         page = FileSystemPage(self.wiki, page_info)
-        added_p = self._addPage(page)
-        self.session.commit()
-
-        # Invalidate all the appropriate pages.
-        q = self.session.query(SQLPage)\
-                .options(load_only('id', 'needs_invalidate', 'is_ready'))\
-                .filter(SQLPage.needs_invalidate is True)
-        for p in q.all():
-            if p.id == added_p.id:
-                continue
-            p.is_ready = False
+        self._addPage(page)
         self.session.commit()
 
     def updateAll(self, page_infos, force=False):
@@ -370,6 +360,18 @@
 
         self.session.commit()
 
+    def uncachePages(self, except_url=None, only_required=False):
+        q = self.session.query(SQLPage)\
+                .options(load_only('id', 'url', 'needs_invalidate', 'is_ready'))
+        if except_url:
+            q = q.filter(SQLPage.url != except_url)
+        if only_required:
+            q = q.filter(SQLPage.needs_invalidate == True)
+
+        for p in q.all():
+            p.is_ready = False
+        self.session.commit()
+
     def pageExists(self, url=None, path=None):
         q = self.session.query(SQLPage.id, SQLPage.url).filter_by(url=url)
         res = self.session.query(q.exists())
@@ -410,7 +412,8 @@
                 'local_links': 'links',
                 'meta': 'ready_meta',
                 'links': 'ready_links',
-                'text': 'ready_text'}
+                'text': 'ready_text',
+                'is_resolved': 'is_ready'}
         subqueryfields = {
                 'local_meta': SQLPage.meta,
                 'local_links': SQLPage.links,
@@ -481,6 +484,8 @@
             data.path = db_obj.path
         if fields is None or 'cache_time' in fields:
             data.cache_time = db_obj.cache_time
+        if fields is None or 'is_resolved' in fields:
+            data.is_resolved = db_obj.is_ready
         if fields is None or 'title' in fields:
             data.title = db_obj.title
         if fields is None or 'raw_text' in fields:
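On the read side, the new 'is_resolved' field maps onto the SQLPage.is_ready column, so callers can cheaply check whether a page's cached output is current. A sketch, assuming a started wiki and a hypothetical URL:

    # 'is_resolved' is the caller-facing field name; it is populated from
    # the is_ready column behind the scenes.
    page = wiki.getPage('/sandbox', fields=['url', 'title', 'is_resolved'])
    if not page.is_resolved:
        print(page.url, "was invalidated and will be re-resolved on next view")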
--- a/wikked/page.py	Mon Sep 29 13:52:08 2014 -0700
+++ b/wikked/page.py	Wed Oct 01 08:19:03 2014 -0700
@@ -62,6 +62,10 @@
         return self._data.cache_time
 
     @property
+    def is_resolved(self):
+        return self._data.is_resolved
+
+    @property
     def extension(self):
         if self._data.path is None:
             raise Exception("The 'path' field was not loaded.")
--- a/wikked/views/__init__.py	Mon Sep 29 13:52:08 2014 -0700
+++ b/wikked/views/__init__.py	Wed Oct 01 08:19:03 2014 -0700
@@ -44,6 +44,8 @@
             fields.append('path')
         if 'cache_time' not in fields:
             fields.append('cache_time')
+        if 'is_resolved' not in fields:
+            fields.append('is_resolved')
 
     try:
         page = g.wiki.getPage(url, fields=fields)
@@ -57,6 +59,12 @@
             app.logger.info("Page '%s' has changed, reloading." % url)
             g.wiki.updatePage(path=page.path)
             page = g.wiki.getPage(url, fields=fields)
+        elif not page.is_resolved:
+            app.logger.info("Page '%s' was not resolved, resolving now." % url)
+            g.wiki.resolve(only_urls=[url])
+            g.wiki.index.updatePage(g.wiki.db.getPage(
+                url, fields=['url', 'path', 'title', 'text']))
+            page = g.wiki.getPage(url, fields=fields)
 
     if check_perms == CHECK_FOR_READ and not is_page_readable(page):
         abort(401)
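The same resolve-on-demand step can be expressed outside a request handler; a sketch of a hypothetical helper mirroring what the view does for a single stale page:

    def ensure_resolved(wiki, url):
        # Re-resolve a page that was flagged as invalidated and refresh
        # its entry in the full-text index, like the view above does.
        page = wiki.getPage(url, fields=['url', 'path', 'is_resolved'])
        if not page.is_resolved:
            wiki.resolve(only_urls=[url])
            wiki.index.updatePage(wiki.db.getPage(
                url, fields=['url', 'path', 'title', 'text']))
        return wiki.getPage(url, fields=['url', 'title', 'text'])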
--- a/wikked/web.py	Mon Sep 29 13:52:08 2014 -0700
+++ b/wikked/web.py	Wed Oct 01 08:19:03 2014 -0700
@@ -69,14 +69,8 @@
 app.logger.debug("Creating Flask application...")
 
 
-def set_app_wiki_params(params):
-    app.wiki_params = params
-    if app.wiki_updater is not None:
-        app.wiki_params.wiki_updater = app.wiki_updater
-
-app.set_wiki_params = set_app_wiki_params
-app.wiki_updater = None
-app.set_wiki_params(WikiParameters(wiki_root))
+# Set the default wiki parameters.
+app.wiki_params = WikiParameters(wiki_root)
 
 
 # Set the wiki as a request global, and open/close the database.
@@ -143,7 +137,7 @@
 
     # Configure Celery.
     app.config['BROKER_URL'] = app.config['BROKER_URL'] % (
-        { 'root': wiki_root })
+            {'root': wiki_root})
     celery_app.conf.update(app.config)
     app.logger.debug("Using Celery broker: %s" % app.config['BROKER_URL'])
 
@@ -151,5 +145,5 @@
     def async_updater(wiki):
         app.logger.debug("Running update task on Celery.")
         update_wiki.delay(wiki.root)
-    app.wiki_updater = async_updater
+    app.wiki_params.wiki_updater = async_updater
 
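Since the wiki now invokes its updater as wiki_updater(wiki, url), an asynchronous updater written against this calling convention would take (and may ignore) the extra URL argument. A sketch for the Celery configuration block above, assuming the task lives in wikked.tasks as the rest of the code suggests:

    from wikked.tasks import update_wiki  # assumed import path

    def async_updater(wiki, url):
        # The background path ignores the edited URL and refreshes the
        # whole wiki in a Celery task.
        app.logger.debug("Running update task on Celery.")
        update_wiki.delay(wiki.root)

    app.wiki_params.wiki_updater = async_updater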
--- a/wikked/wiki.py	Mon Sep 29 13:52:08 2014 -0700
+++ b/wikked/wiki.py	Wed Oct 01 08:19:03 2014 -0700
@@ -30,6 +30,10 @@
 BACKGROUND_CONTEXT = 2
 
 
+def synchronous_wiki_updater(wiki, url):
+    wiki.updateAll()
+
+
 class WikiParameters(object):
     """ An object that defines how a wiki gets initialized.
     """
@@ -39,7 +43,7 @@
         self.root = root
         self.context = ctx
         self.formatters = self.getFormatters()
-        self.wiki_updater = self.getWikiUpdater()
+        self.wiki_updater = synchronous_wiki_updater
         self._config = None
         self._index_factory = None
         self._scm_factory = None
@@ -95,9 +99,6 @@
         except ImportError:
             pass
 
-    def getWikiUpdater(self):
-        return lambda wiki: wiki.updateAll()
-
     def _loadConfig(self):
         # Merge the default settings with any settings provided by
         # the local config file(s).
@@ -245,8 +246,7 @@
         """
         logger.info("Resetting wiki data...")
         page_infos = self.fs.getPageInfos()
-        factory = lambda pi: FileSystemPage(self, pi)
-        self.db.reset(page_infos, factory)
+        self.db.reset(page_infos)
         self.resolve(force=True)
         self.index.reset(self.getPages())
 
@@ -342,7 +342,7 @@
         self.updatePage(path=page_info.path)
 
         # Update all the other pages.
-        self._wiki_updater(self)
+        self._wiki_updater(self, url)
 
     def revertPage(self, url, page_fields):
         """ Reverts the page with the given URL to an older revision.
@@ -375,7 +375,7 @@
         self.updatePage(url)
 
         # Update all the other pages.
-        self._wiki_updater(self)
+        self._wiki_updater(self, url)
 
     def pageExists(self, url):
         """ Returns whether a page exists at the given URL.
@@ -402,23 +402,6 @@
             endpoints[ep.name] = ep
         return endpoints
 
-    def _setupPostSetPageUpdater(self, async):
-        if async:
-            logger.debug("Setting up asynchronous updater.")
-            from tasks import update_wiki
-            self._postSetPageUpdate = lambda wiki: update_wiki.delay(self.root)
-        else:
-            logger.debug("Setting up simple updater.")
-            self._postSetPageUpdate = lambda wiki: wiki._simplePostSetPageUpdate()
-
-    def _simpleWikiUpdater(self):
-        page_urls = self.db.getPageUrls(uncached_only=True)
-        self.resolve(only_urls=page_urls)
-        pages = [self.db.getPage(url=pu,
-                                 fields=['url', 'path', 'title', 'text'])
-                 for pu in page_urls]
-        self.index.updateAll(pages)
-
 
 def reloader_stat_loop(wiki, interval=1):
     mtimes = {}
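The removed _simpleWikiUpdater helper above shows the bulk counterpart of the new lazy path; the same batch refresh can still be written against the surviving APIs (a sketch, assuming db.getPageUrls(uncached_only=True) keeps the behavior it had here):

    stale_urls = list(wiki.db.getPageUrls(uncached_only=True))
    wiki.resolve(only_urls=stale_urls)
    pages = [wiki.db.getPage(url=u, fields=['url', 'path', 'title', 'text'])
             for u in stale_urls]
    wiki.index.updateAll(pages)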