changeset 390:5855c42752be

code: Fix PEP8 and other cosmetic errors.
author Ludovic Chabant <ludovic@chabant.com>
date Wed, 14 Oct 2015 20:50:42 -0700
parents 08b831d894c8
children 2156d47b577f
files wikked/db/base.py wikked/db/sql.py wikked/web.py wikked/wiki.py
diffstat 4 files changed, 49 insertions(+), 38 deletions(-)
--- a/wikked/db/base.py	Tue Oct 13 23:56:21 2015 -0700
+++ b/wikked/db/base.py	Wed Oct 14 20:50:42 2015 -0700
@@ -3,7 +3,8 @@
 
 class PageListNotFound(Exception):
     def __init__(self, list_name):
-        super(PageListNotFound, self).__init__("No such page list: %s" % list_name)
+        super(PageListNotFound, self).__init__(
+                "No such page list: %s" % list_name)
 
 
 class Database(object):
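
A side note on the base.py hunk: it only re-wraps the super() call to satisfy the line-length limit. Under Python 3, the zero-argument form of super() (PEP 3135) would keep the statement on one line with the same behavior; a possible alternative, not what this changeset does:

class PageListNotFound(Exception):
    def __init__(self, list_name):
        # Zero-argument super() resolves the class and instance implicitly.
        super().__init__("No such page list: %s" % list_name)
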
--- a/wikked/db/sql.py	Tue Oct 13 23:56:21 2015 -0700
+++ b/wikked/db/sql.py	Wed Oct 14 20:50:42 2015 -0700
@@ -1,6 +1,5 @@
 import os
 import os.path
-import types
 import string
 import logging
 import datetime
@@ -12,7 +11,7 @@
     String, Text, UnicodeText)
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import (
-    scoped_session, sessionmaker,
+    scoped_session,
     relationship, backref, load_only, subqueryload, joinedload,
     Load)
 from sqlalchemy.orm.exc import NoResultFound
@@ -353,11 +352,12 @@
         logger.debug("Updating SQL database for page: %s" % page_info.url)
 
         db_page = self.session.query(SQLPage).\
-                options(load_only('id', 'url')).\
-                filter(SQLPage.url == page_info.url).\
-                first()
+            options(load_only('id', 'url')).\
+            filter(SQLPage.url == page_info.url).\
+            first()
         if db_page:
-            logger.debug("Removing page '%s' [%d] from SQL database." %
+            logger.debug(
+                    "Removing page '%s' [%d] from SQL database." %
                     (db_page.url, db_page.id))
             self.session.delete(db_page)
             self.session.commit()
@@ -395,7 +395,7 @@
                     to_update.add(p.path)
         for p in to_remove:
             logger.debug("Removing page '%s' [%d] from SQL database." %
-                (p.url, p.id))
+                         (p.url, p.id))
             self.session.delete(p)
 
         self.session.commit()
@@ -418,7 +418,7 @@
             subdir = string.rstrip(subdir, '/') + '/%'
             q = q.filter(SQLPage.url.like(subdir))
         if uncached_only:
-            q = q.filter(SQLPage.is_ready == False)
+            q = q.filter(SQLPage.is_ready.is_(False))
         for p in q.all():
             yield p.url
 
@@ -430,7 +430,7 @@
             for name, values in meta_query.items():
                 for v in values:
                     q = q.filter(and_(SQLReadyMeta.name == name,
-                        SQLReadyMeta.value == v))
+                                      SQLReadyMeta.value == v))
         if subdir:
             subdir = string.rstrip(subdir, '/') + '/%'
             q = q.filter(SQLPage.url.like(subdir))
@@ -439,16 +439,19 @@
         if endpoint_only:
             q = q.filter(SQLPage.endpoint == endpoint_only)
         elif no_endpoint_only:
-            q = q.filter(SQLPage.endpoint == None)
+            q = q.filter(SQLPage.endpoint.is_(None))
         q = self._addFieldOptions(q, fields)
         for p in q.all():
             yield SQLDatabasePage(self, p, fields)
 
     def cachePage(self, page):
         if not hasattr(page, '_id') or not page._id:
-            raise Exception("Given page '%s' has no `_id` attribute set." % page.url)
+            raise Exception("Given page '%s' has no `_id` attribute set." %
+                            page.url)
 
-        logger.debug("Caching extended data for page '%s' [%d]." % (page.url, page._id))
+        logger.debug(
+                "Caching extended data for page '%s' [%d]." %
+                (page.url, page._id))
 
         try:
             db_obj = self.session.query(SQLPage).\
@@ -488,11 +491,12 @@
 
     def uncachePages(self, except_url=None, only_required=False):
         q = self.session.query(SQLPage)\
-                .options(load_only('id', 'url', 'needs_invalidate', 'is_ready'))
+                .options(load_only('id', 'url', 'needs_invalidate',
+                                   'is_ready'))
         if except_url:
             q = q.filter(SQLPage.url != except_url)
         if only_required:
-            q = q.filter(SQLPage.needs_invalidate == True)
+            q = q.filter(SQLPage.needs_invalidate.is_(True))
 
         for p in q.all():
             p.is_ready = False
@@ -530,7 +534,7 @@
         return SQLDatabasePage(self, page, fields)
 
     def _addFieldOptions(self, query, fields, use_joined=True,
-            use_load_obj=False):
+                         use_load_obj=False):
         if fields is None:
             return query
 
@@ -608,11 +612,12 @@
             # We may have a previous list marked as non-valid. Let's
             # revive it.
             if page_list.is_valid:
-                raise Exception("Page list already exists and is valid: %s" % list_name)
+                raise Exception("Page list already exists and is valid: %s" %
+                                list_name)
             logger.debug("Reviving page list '%s'." % list_name)
             self.session.query(SQLPageListItem)\
-                    .filter(SQLPageListItem.list_id == page_list.id)\
-                    .delete()
+                .filter(SQLPageListItem.list_id == page_list.id)\
+                .delete()
             page_list.is_valid = True
         else:
             logger.debug("Creating page list '%s'." % list_name)
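
A note on the comparison cleanups in wikked/db/sql.py: rewriting == False, == None and == True as identity tests would silence flake8's E711/E712, but it also changes the semantics. SQLAlchemy overloads == on column attributes to build a SQL expression, whereas "is" cannot be overloaded and collapses to a plain Python bool, which is not a SQL criterion at all. The lines above therefore use the column operators .is_(False), .is_(None) and .is_(True), which keep flake8 quiet and still emit SQL. A minimal standalone sketch of the difference (illustrative model, not wikked's):

from sqlalchemy import Boolean, Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Page(Base):
    __tablename__ = 'pages'
    id = Column(Integer, primary_key=True)
    is_ready = Column(Boolean)


# == is overloaded on Column: it returns a SQL expression object.
expr = (Page.is_ready == False)      # E712 under flake8, but valid criteria
# "is" cannot be overloaded: a Python identity test, always False here.
broken = (Page.is_ready is False)    # plain bool, the filter matches nothing
# The flake8-friendly spelling that still produces SQL:
fixed = Page.is_ready.is_(False)     # emits an IS comparison in the SQL

Unrelated but visible in the same hunks: the context line subdir = string.rstrip(subdir, '/') + '/%' relies on the module-level string.rstrip(), which no longer exists in Python 3; the portable spelling is subdir.rstrip('/') + '/%'.
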
--- a/wikked/web.py	Tue Oct 13 23:56:21 2015 -0700
+++ b/wikked/web.py	Wed Oct 14 20:50:42 2015 -0700
@@ -164,19 +164,18 @@
 
 
 # Import the views.
-# (this creates a PyLint warning but it's OK)
-# pylint: disable=unused-import
-import wikked.api.admin
-import wikked.api.edit
-import wikked.api.history
-import wikked.api.read
-import wikked.api.special
-import wikked.views.admin
-import wikked.views.edit
-import wikked.views.error
-import wikked.views.history
-import wikked.views.read
-import wikked.views.special
+# (this creates a PyFlakes warning but it's OK)
+import wikked.api.admin       # NOQA
+import wikked.api.edit        # NOQA
+import wikked.api.history     # NOQA
+import wikked.api.read        # NOQA
+import wikked.api.special     # NOQA
+import wikked.views.admin     # NOQA
+import wikked.views.edit      # NOQA
+import wikked.views.error     # NOQA
+import wikked.views.history   # NOQA
+import wikked.views.read      # NOQA
+import wikked.views.special   # NOQA
 
 
 # Async wiki update.
@@ -209,14 +208,17 @@
     username = app.config['INFLUXDB_USERNAME']
     password = app.config['INFLUXDB_PASSWORD']
     database = app.config['INFLUXDB_DATABASE']
-    metrics_db = influxdb.InfluxDBClient(host, port, username, password, database)
+    metrics_db = influxdb.InfluxDBClient(host, port, username, password,
+                                         database)
     app.logger.info("Opening InfluxDB %s on %s:%s as %s." % (
         database, host, port, username))
 
     import time
-    from flask import g, request, request_started, request_tearing_down
+    from flask import request, request_started, request_tearing_down
+
     def on_request_started(sender, **extra):
         g.metrics_start_time = time.clock()
+
     def on_request_tearing_down(sender, **extra):
         duration = time.clock() - g.metrics_start_time
         data = [
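
A note on the metrics hunk in wikked/web.py: dropping g from the local flask import assumes g is already bound at module scope in web.py, because the two handlers below still read and write g.metrics_start_time. The hunk otherwise only re-wraps the InfluxDBClient call and adds the blank lines flake8 expects between nested functions. For reference, a minimal standalone sketch of the same per-request timing pattern with Flask signals (the names and the logging call are illustrative, not taken from wikked; Flask signals need the blinker package, and time.perf_counter() stands in for the deprecated time.clock() used in the hunk):

import time

from flask import Flask, g, request, request_started, request_tearing_down

app = Flask(__name__)


def on_request_started(sender, **extra):
    # Stash the start time on the request-bound g object.
    g.metrics_start_time = time.perf_counter()


def on_request_tearing_down(sender, **extra):
    duration = time.perf_counter() - g.metrics_start_time
    # sender is the Flask app that dispatched the signal.
    sender.logger.info("%s handled in %.4fs", request.path, duration)


# Signals are per-application: pass the app as the sender when connecting.
request_started.connect(on_request_started, app)
request_tearing_down.connect(on_request_tearing_down, app)
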
--- a/wikked/wiki.py	Tue Oct 13 23:56:21 2015 -0700
+++ b/wikked/wiki.py	Wed Oct 14 20:50:42 2015 -0700
@@ -5,7 +5,6 @@
 import importlib
 import multiprocessing
 from configparser import SafeConfigParser, NoOptionError
-from wikked.page import FileSystemPage
 from wikked.fs import FileSystem
 from wikked.auth import UserManager
 from wikked.scheduler import ResolveScheduler
@@ -155,7 +154,8 @@
 
             if scm_type == 'hg':
                 def impl():
-                    from wikked.scm.mercurial import MercurialCommandServerSourceControl
+                    from wikked.scm.mercurial import \
+                            MercurialCommandServerSourceControl
                     return MercurialCommandServerSourceControl(self.root)
                 self._scm_factory = impl
 
@@ -307,8 +307,11 @@
                  endpoint_only=None, no_endpoint_only=False, fields=None):
         """ Gets all the pages in the wiki, or in the given sub-directory.
         """
-        for page in self.db.getPages(subdir=subdir, meta_query=meta_query,
-                endpoint_only=endpoint_only, no_endpoint_only=no_endpoint_only,
+        for page in self.db.getPages(
+                subdir=subdir,
+                meta_query=meta_query,
+                endpoint_only=endpoint_only,
+                no_endpoint_only=no_endpoint_only,
                 fields=fields):
             yield page
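
Lastly, the wiki.py hunk only re-wraps the db.getPages() call; the method body still loops over the database query and re-yields each page. Since the module already targets Python 3 (it imports configparser), the same delegation could be written with yield from, which also removes one level of nesting; a possible follow-up, not part of this changeset:

        # Python 3: delegate instead of looping and re-yielding each page.
        yield from self.db.getPages(
                subdir=subdir,
                meta_query=meta_query,
                endpoint_only=endpoint_only,
                no_endpoint_only=no_endpoint_only,
                fields=fields)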