wikked: changeset 49:fb6ae96756c1
Added unit tests.
Refactored core APIs to make them more testable.
Removed unused stuff like caching the configuration in the SQL database.
Fixed the web bootstrap.
Some cosmetic changes to be PEP8 compliant.
author      Ludovic Chabant <ludovic@chabant.com>
date        Mon, 28 Jan 2013 23:13:04 -0800
parents     9658edea3121
children    350f7f084028
files       tests/__init__.py tests/mock.py tests/test_db.py wikked/db.py wikked/fs.py wikked/indexer.py wikked/scm.py wikked/web.py wikked/wiki.py
diffstat    9 files changed, 504 insertions(+), 169 deletions(-)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/__init__.py Mon Jan 28 23:13:04 2013 -0800
@@ -0,0 +1,32 @@
+import os
+import os.path
+import shutil
+import unittest
+from wikked.wiki import Wiki
+from mock import MockWikiParameters
+
+
+class WikkedTest(unittest.TestCase):
+    def setUp(self):
+        self.root = os.path.join(
+                os.path.dirname(os.path.dirname(__file__)),
+                'test_data')
+
+    def tearDown(self):
+        if hasattr(self, 'root') and os.path.isdir(self.root):
+            shutil.rmtree(self.root)
+
+    def getWiki(self, **kwargs):
+        parameters = self.getParameters()
+        for key in kwargs:
+            setattr(parameters, key, kwargs[key])
+        wiki = Wiki(parameters)
+        return wiki
+
+    def getStartedWiki(self, **kwargs):
+        wiki = self.getWiki(**kwargs)
+        wiki.start()
+        return wiki
+
+    def getParameters(self):
+        return MockWikiParameters()
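For context, a test case built on this new base class would look roughly like the sketch below; the test class and the config value are hypothetical, not part of this changeset:

    # Hypothetical example: a test that overrides the mock parameters
    # (defined in tests/mock.py below) before building a wiki.
    from tests import WikkedTest


    class ExampleTest(WikkedTest):
        def testCustomConfig(self):
            # Any keyword passed to getWiki() is set on the parameters object,
            # so config_text ends up feeding the mock config_factory.
            wiki = self.getWiki(config_text="[wiki]\nscm = hg\n")
            self.assertEqual('hg', wiki.config.get('wiki', 'scm'))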
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/mock.py Mon Jan 28 23:13:04 2013 -0800
@@ -0,0 +1,238 @@
+import re
+import os.path
+import types
+import codecs
+import logging
+import StringIO
+from wikked.page import Page
+from wikked.fs import PageNotFoundError
+from wikked.db import Database
+from wikked.indexer import WikiIndex
+from wikked.scm import SourceControl
+
+
+class MockWikiParameters(object):
+    def __init__(self):
+        self.formatters = {
+            self._passthrough: ['txt', 'html']
+        }
+
+        self.config_text = ""
+        self.special_filenames = []
+
+        self.logger_factory = lambda: logging.getLogger('wikked.tests')
+        self.page_factory = lambda wiki, url: MockPage(wiki, url)
+        self.config_factory = lambda: StringIO.StringIO(self.config_text)
+        self.fs_factory = lambda cfg: MockFileSystem()
+        self.index_factory = lambda cfg: MockWikiIndex()
+        self.db_factory = lambda cfg: MockDatabase()
+        self.scm_factory = lambda cfg: MockSourceControl()
+
+    def getSpecialFilenames(self):
+        return self.special_filenames
+
+    def _passthrough(self, text):
+        return text
+
+
+class MockPage(Page):
+    def __init__(self, wiki, url):
+        Page.__init__(self, wiki, url)
+
+
+class MockDatabase(Database):
+    def __init__(self, content=None, logger=None):
+        Database.__init__(self, logger)
+        self.content = content
+        self._open_count = 0
+
+    def initDb(self):
+        pass
+
+    def open(self):
+        self._open_count += 1
+
+    def close(self):
+        self._open_count -= 1
+        if self._open_count < 0:
+            raise Exception(
+                    "The database was closed more times than it was open.")
+
+    def reset(self, pages):
+        pass
+
+    def update(self, pages):
+        pass
+
+    def getPageUrls(self, subdir=None):
+        return []
+
+    def getPages(self, subdir=None):
+        return []
+
+    def getPage(self, url):
+        return None
+
+    def pageExists(self, url):
+        return False
+
+    def getLinksTo(self, url):
+        return []
+
+
+class MockFileSystem():
+    def __init__(self, structure=None, slugify=Page.title_to_url, logger=None):
+        if not structure:
+            structure = []
+        if not slugify:
+            slugify = lambda x: x
+        self.structure = structure
+        self.slugify = slugify
+        self.logger = logger
+        self.excluded = []
+
+    def getPageInfos(self, subdir=None):
+        node = self._getNode(subdir)
+        for n in self._getChildren(node):
+            yield self._getPageInfo(n)
+
+    def getPageInfo(self, path):
+        node = self._getNode(path)
+        return self._getPageInfo(node)
+
+    def getPage(self, url):
+        path = self._getPath(url, True)
+        node = self._getNode(path)
+        return self._getPageInfo(node, True)
+
+    def setPage(self, path, content):
+        pass
+
+    def pageExists(self, url):
+        return False
+
+    def getPhysicalNamespacePath(self, url):
+        return None
+
+    def _getPageInfo(self, node, with_content=False):
+        path_split = os.path.splitext(node['path'])
+        url = self.slugify(path_split[0])
+        info = {
+            'url': url,
+            'path': node['path']
+        }
+        if with_content:
+            info['content'] = node['content']
+        return info
+
+    def _getNode(self, path):
+        node = self.structure
+        if path:
+            for n in path.split('/'):
+                node = node[n]
+        else:
+            path = ''
+        if isinstance(node, types.StringTypes):
+            return {'type': 'file', 'path': path, 'content': node}
+        return {'type': 'dir', 'path': path, 'content': node}
+
+    def _getChildren(self, node):
+        if node['type'] != 'dir':
+            raise Exception("'%s' is not a directory." % node['path'])
+        for name in node['content']:
+            child_path = os.path.join(node['path'], name)
+            child = node['content'][name]
+            if isinstance(child, types.StringTypes):
+                yield {
+                    'type': 'file',
+                    'path': child_path,
+                    'content': child
+                }
+            else:
+                for c in self._getChildren({
+                        'type': 'dir',
+                        'path': child_path,
+                        'content': child
+                        }):
+                    yield c
+
+    def _getPath(self, url, is_file):
+        path = ''
+        current = self.structure
+        parts = unicode(url).lower().split('/')
+        for i, part in enumerate(parts):
+            for name in current:
+                name_slug = self.slugify(name)
+                if is_file and i == len(parts) - 1:
+                    if re.match(r"%s\.[a-z]+" % re.escape(part), name_slug):
+                        current = current[name]
+                        path = os.path.join(path, name)
+                        break
+                else:
+                    if name_slug == part:
+                        current = current[name]
+                        path = os.path.join(path, name)
+                        break
+            else:
+                # Failed to find a part of the URL.
+                raise PageNotFoundError("No such page: " + url)
+        return path
+
+    @staticmethod
+    def save_structure(path, structure):
+        if not os.path.isdir(path):
+            os.makedirs(path)
+        for node in structure:
+            node_path = os.path.join(path, node)
+            if isinstance(structure[node], types.StringTypes):
+                with codecs.open(node_path, 'w', encoding='utf-8') as f:
+                    f.write(structure[node])
+            else:
+                MockFileSystem.save_structure(node_path, structure[node])
+
+
+class MockWikiIndex(WikiIndex):
+    def __init__(self, logger=None):
+        WikiIndex.__init__(self, logger)
+
+    def initIndex(self):
+        pass
+
+    def reset(self, pages):
+        pass
+
+    def update(self, pages):
+        pass
+
+    def search(self, query):
+        # url, title, content_highlights
+        return None
+
+
+class MockSourceControl(SourceControl):
+    def __init__(self, logger=None):
+        SourceControl.__init__(self, logger)
+
+    def initRepo(self):
+        pass
+
+    def getSpecialFilenames(self):
+        return []
+
+    def getHistory(self, path=None):
+        return []
+
+    def getState(self, path):
+        raise NotImplementedError()
+
+    def getRevision(self, path, rev):
+        raise NotImplementedError()
+
+    def diff(self, path, rev1, rev2):
+        raise NotImplementedError()
+
+    def commit(self, paths, op_meta):
+        raise NotImplementedError()
+
+    def revert(self, paths=None):
+        raise NotImplementedError()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test_db.py Mon Jan 28 23:13:04 2013 -0800
@@ -0,0 +1,42 @@
+import os.path
+from tests import WikkedTest
+from mock import MockFileSystem
+from wikked.fs import FileSystem
+from wikked.db import SQLiteDatabase
+
+
+class DatabaseTest(WikkedTest):
+    def tearDown(self):
+        if hasattr(self, 'wiki') and self.wiki:
+            self.wiki.db.close()
+        WikkedTest.tearDown(self)
+
+    def testEmpty(self):
+        self.wiki = self._getWikiFromStructure({})
+        self.assertEqual([], list(self.wiki.getPageUrls()))
+
+    def testOnePage(self):
+        self.wiki = self._getWikiFromStructure({
+            'foo.txt': 'A test page.'
+        })
+        self.assertEqual(['foo'], list(self.wiki.getPageUrls()))
+        page = self.wiki.getPage('foo')
+        self.assertEqual('foo', page.url)
+        self.assertEqual(os.path.join(self.root, 'foo.txt'), page.path)
+        self.assertEqual('A test page.', page.raw_text)
+
+    def _getWikiFromStructure(self, structure):
+        MockFileSystem.save_structure(self.root, structure)
+        wiki = self.getWiki(
+            db_factory=self._dbFactory,
+            fs_factory=self._fsFactory
+        )
+        wiki.db.open()
+        wiki.start()
+        return wiki
+
+    def _fsFactory(self, config):
+        return FileSystem(self.root)
+
+    def _dbFactory(self, config):
+        return SQLiteDatabase(':memory:')
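The new test modules can be driven with the standard library runner; a possible invocation (not part of the changeset) from the repository root:

    # Hypothetical: run the new database tests with the stdlib test runner.
    import unittest

    from tests.test_db import DatabaseTest

    if __name__ == '__main__':
        suite = unittest.TestLoader().loadTestsFromTestCase(DatabaseTest)
        unittest.TextTestRunner(verbosity=2).run(suite)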
--- a/wikked/db.py Sat Jan 26 22:17:51 2013 -0800
+++ b/wikked/db.py Mon Jan 28 23:13:04 2013 -0800
@@ -28,9 +28,7 @@
 class Database(object):
     """ The base class for a database cache.
     """
-    def __init__(self, wiki, logger=None):
-        self.wiki = wiki
-
+    def __init__(self, logger=None):
         if logger is None:
             logger = logging.getLogger('wikked.db')
         self.logger = logger
@@ -65,60 +63,46 @@
     def getLinksTo(self, url):
         raise NotImplementedError()
 
-    def getConfigValues(self, section):
-        raise NotImplementedError()
-
-    def getConfigValue(self, section, name):
-        raise NotImplementedError()
-
 
 class SQLiteDatabase(Database):
     """ A database cache based on SQLite.
     """
     schema_version = 1
 
-    def __init__(self, wiki, logger=None):
-        Database.__init__(self, wiki, logger)
-        self.db_path = os.path.join(wiki.root, '.wiki', 'wiki.db')
+    def __init__(self, db_path, logger=None):
+        Database.__init__(self, logger)
+        self.db_path = db_path
         self.conn = None
 
     def initDb(self):
         create_schema = False
-        if not os.path.isdir(os.path.dirname(self.db_path)):
-            # No database on disk... create one.
-            self.logger.debug("Creating SQL database.")
-            os.makedirs(os.path.dirname(self.db_path))
+        if self.db_path != ':memory:':
+            if not os.path.isdir(os.path.dirname(self.db_path)):
+                # No database on disk... create one.
+                self.logger.debug("Creating SQL database.")
+                os.makedirs(os.path.dirname(self.db_path))
+                create_schema = True
+            else:
+                # The existing schema is outdated, re-create it.
+                schema_version = self._getSchemaVersion()
+                if schema_version < self.schema_version:
+                    create_schema = True
+        else:
             create_schema = True
-        else:
-            # The existing schema is outdated, re-create it.
-            schema_version = self._getSchemaVersion()
-            if schema_version < self.schema_version:
-                create_schema = True
         if create_schema:
             with conn_scope(self):
                 self._createSchema()
 
-        # Cache the configuration.
-        cache_config = False
-        config_time = self._getInfoTime('config_time')
-        if os.path.isfile(self.wiki.config_path):
-            if (config_time is None or
-                config_time <= datetime.datetime.fromtimestamp(
-                    os.path.getmtime(self.wiki.config_path))):
-                cache_config = True
-        elif config_time is not None:
-            cache_config = True
-        if cache_config:
-            self._cacheConfig(self.wiki.config)
-
     def open(self):
         if self.conn is None:
+            self.logger.debug("Opening connection")
             self.conn = sqlite3.connect(self.db_path,
                 detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
             self.conn.row_factory = sqlite3.Row
 
     def close(self):
         if self.conn is not None:
+            self.logger.debug("Closing connection")
             self.conn.close()
             self.conn = None
@@ -154,16 +138,10 @@
         self.conn.commit()
 
         for page in pages:
-            # We want the page's path, but getting it may load all kinds
-            # of metadata that is time-consuming, so we shortcut the
-            # system by querying the file-system directly.
-            fs_meta = self.wiki.fs.getPage(page.url)
-            if (fs_meta['path'] in to_update or
-                    fs_meta['path'] not in already_added):
+            if (page.path in to_update or
+                    page.path not in already_added):
                 self._addPage(page, c)
 
-            # TODO: update any page with a query in it.
-
         self.conn.commit()
         self.logger.debug("...done updating SQL database.")
@@ -224,14 +202,6 @@
                 sources.append(r['source'])
         return sources
 
-    def getConfigValues(self, section):
-        with conn_scope(self):
-            pass
-
-    def getConfigValue(self, section, name):
-        with conn_scope(self):
-            pass
-
     def _createSchema(self):
         self.logger.debug("Creating SQL schema...")
         c = self.conn.cursor()
@@ -260,11 +230,6 @@
                 page_id INTEGER,
                 name TEXT,
                 value TEXT)''')
-        c.execute('''DROP TABLE IF EXISTS config''')
-        c.execute('''CREATE TABLE config
-                (section TEXT,
-                name TEXT,
-                value TEXT)''')
         c.execute('''DROP TABLE IF EXISTS info''')
         c.execute('''CREATE TABLE info
                 (name TEXT UNIQUE NOT NULL,
@@ -319,21 +284,6 @@
             return 0
         return row[0]
 
-    def _cacheConfig(self, config):
-        self.logger.debug("Re-caching configuration into SQL database.")
-        with conn_scope(self):
-            c = self.conn.cursor()
-            c.execute('''DELETE FROM config''')
-            for section in config.sections():
-                items = config.items(section)
-                for item in items:
-                    c.execute('''INSERT INTO config
-                        (section, name, value) VALUES (?, ?, ?)''',
-                        (section, item[0], item[1]))
-            c.execute('''INSERT OR REPLACE INTO info (name, time_value)
-                VALUES ("config_time", ?)''', (datetime.datetime.now(),))
-            self.conn.commit()
-
     def _addPage(self, page, c):
         self.logger.debug("Adding page '%s' to SQL database." % page.url)
         now = datetime.datetime.now()
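With the `wiki` argument gone, the database can now be pointed at any path; the in-memory case used by the tests would look roughly like this sketch (not code from the changeset):

    # Hypothetical usage of the refactored constructor: an in-memory database.
    from wikked.db import SQLiteDatabase

    db = SQLiteDatabase(':memory:')
    db.open()
    db.initDb()   # ':memory:' now skips the on-disk directory checks
    db.close()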
--- a/wikked/fs.py Sat Jan 26 22:17:51 2013 -0800
+++ b/wikked/fs.py Mon Jan 28 23:13:04 2013 -0800
@@ -3,6 +3,7 @@
 import re
 import string
 import codecs
+import logging
 
 
 class PageNotFoundError(Exception):
@@ -17,15 +18,20 @@
     file-system paths, and for scanning the file-system to list existing
     pages.
     """
-    def __init__(self, root, slugify=None):
+    def __init__(self, root, slugify=None, logger=None):
         self.root = unicode(root)
+
+        if slugify is None:
+            slugify = lambda x: x
         self.slugify = slugify
+
+        if logger is None:
+            logger = logging.getLogger('wikked.fs')
+        self.logger = logger
+
         self.excluded = []
         self.page_extensions = None
 
-        if slugify is None:
-            self.slugify = lambda x: x
-
     def getPageInfos(self, subdir=None):
         basepath = self.root
         if subdir is not None:
@@ -59,6 +65,10 @@
             'content': content
         }
 
+    def setPage(self, path, content):
+        with codecs.open(path, 'w', encoding='utf-8') as f:
+            f.write(content)
+
     def pageExists(self, url):
         try:
             self.getPhysicalPagePath(url)
@@ -66,6 +76,9 @@
         except PageNotFoundError:
             return False
 
+    def getPhysicalPagePath(self, url):
+        return self._getPhysicalPath(url, True)
+
     def getPhysicalNamespacePath(self, url):
         return self._getPhysicalPath(url, False)
 
@@ -84,9 +97,6 @@
             'path': path
         }
 
-    def getPhysicalPagePath(self, url):
-        return self._getPhysicalPath(url, True)
-
     def _getPhysicalPath(self, url, is_file):
         if string.find(url, '..') >= 0:
             raise ValueError("Page URLs can't contain '..': " + url)
@@ -103,7 +113,7 @@
                 if is_file and i == len(parts) - 1:
                     # If we're looking for a file and this is the last part,
                     # look for something similar but with an extension.
-                    if re.match("%s\.[a-z]+" % re.escape(part), name_formatted):
+                    if re.match(r"%s\.[a-z]+" % re.escape(part), name_formatted):
                         current = os.path.join(current, name)
                         break
                 else:
@@ -114,4 +124,3 @@
                 # Failed to find a part of the URL.
                 raise PageNotFoundError("No such page: " + url)
         return current
-
--- a/wikked/indexer.py Sat Jan 26 22:17:51 2013 -0800
+++ b/wikked/indexer.py Mon Jan 28 23:13:04 2013 -0800
@@ -8,8 +8,7 @@
 
 
 class WikiIndex(object):
-    def __init__(self, store_dir, logger=None):
-        self.store_dir = store_dir
+    def __init__(self, logger=None):
         self.logger = logger
         if logger is None:
             self.logger = logging.getLogger('wikked.index')
@@ -29,7 +28,8 @@
 
 class WhooshWikiIndex(WikiIndex):
     def __init__(self, store_dir, logger=None):
-        WikiIndex.__init__(self, store_dir, logger)
+        WikiIndex.__init__(self, logger)
+        self.store_dir = store_dir
 
     def initIndex(self):
         if not os.path.isdir(self.store_dir):
--- a/wikked/scm.py Sat Jan 26 22:17:51 2013 -0800
+++ b/wikked/scm.py Mon Jan 28 23:13:04 2013 -0800
@@ -18,8 +18,7 @@
 
 
 class SourceControl(object):
-    def __init__(self, root, logger=None):
-        self.root = root
+    def __init__(self, logger=None):
         self.logger = logger
         if logger is None:
             self.logger = logging.getLogger('wikked.scm')
@@ -27,7 +26,7 @@
     def initRepo(self):
         raise NotImplementedError()
 
-    def getSpecialDirs(self):
+    def getSpecialFilenames(self):
         raise NotImplementedError()
 
     def getHistory(self, path=None):
@@ -68,7 +67,8 @@
 
 class MercurialSourceControl(SourceControl):
     def __init__(self, root, logger=None):
-        SourceControl.__init__(self, root, logger)
+        SourceControl.__init__(self, logger)
+        self.root = root
         self.hg = 'hg'
         self.log_style = os.path.join(os.path.dirname(__file__),
             'resources', 'hg_log.style')
@@ -93,15 +93,15 @@
             self._run('add', ignore_path)
             self._run('commit', ignore_path, '-m', 'Created .hgignore.')
 
-    def getSpecialDirs(self):
-        specials = [ '.hg', '.hgignore', '.hgtags' ]
-        return [ os.path.join(self.root, d) for d in specials ]
+    def getSpecialFilenames(self):
+        specials = ['.hg', '.hgignore', '.hgtags']
+        return [os.path.join(self.root, d) for d in specials]
 
     def getHistory(self, path=None):
         if path is not None:
             st_out = self._run('status', path)
             if len(st_out) > 0 and st_out[0] == '?':
-                return [ Revision() ]
+                return [Revision()]
 
         log_args = []
         if path is not None:
@@ -131,7 +131,7 @@
 
     def diff(self, path, rev1, rev2):
         if rev2 is None:
-            diff_out = self._run('diff', '-c', rev1, '--git', path);
+            diff_out = self._run('diff', '-c', rev1, '--git', path)
         else:
             diff_out = self._run('diff', '-r', rev1, '-r', rev2, '--git', path)
         return diff_out
@@ -156,9 +156,9 @@
 
         # Commit and clean up the temp file.
         try:
-            commit_args = list(paths) + [ '-l', temp ]
+            commit_args = list(paths) + ['-l', temp]
             if 'author' in op_meta:
-                commit_args += [ '-u', op_meta['author'] ]
+                commit_args += ['-u', op_meta['author']]
             self._run('commit', *commit_args)
         finally:
             os.remove(temp)
@@ -194,16 +194,15 @@
                 for j in range(i + 1, len(lines)):
                     if lines[j] == '':
                         continue
-                    rev.files.append({ 'path': lines[j][2:], 'action': self.actions[lines[j][0]] })
+                    rev.files.append({'path': lines[j][2:], 'action': self.actions[lines[j][0]]})
 
         return rev
 
     def _run(self, cmd, *args, **kwargs):
-        exe = [ self.hg ]
+        exe = [self.hg]
        if 'norepo' not in kwargs or not kwargs['norepo']:
-            exe += [ '-R', self.root ]
+            exe += ['-R', self.root]
         exe.append(cmd)
         exe += args
         self.logger.debug("Running Mercurial: " + str(exe))
         return subprocess.check_output(exe)
-
--- a/wikked/web.py Sat Jan 26 22:17:51 2013 -0800
+++ b/wikked/web.py Mon Jan 28 23:13:04 2013 -0800
@@ -1,5 +1,5 @@
 from flask import Flask, abort, g
-from wiki import Wiki
+from wiki import Wiki, WikiParameters
 
 # Create the main app.
 app = Flask("wikked")
@@ -8,7 +8,9 @@
 
 
 def create_wiki():
-    wiki = Wiki(root=app.config.get('WIKI_ROOT'), logger=app.logger)
+    params = WikiParameters(root=app.config.get('WIKI_ROOT'))
+    params.logger = app.logger
+    wiki = Wiki(params)
     wiki.start()
     return wiki
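A minimal sketch of how the fixed bootstrap would be exercised, assuming a wiki root path (the path below is hypothetical):

    # Hypothetical: configure the Flask app and build the wiki through the
    # new WikiParameters-based bootstrap.
    from wikked.web import app, create_wiki

    app.config['WIKI_ROOT'] = '/path/to/a/wiki'   # assumed location
    wiki = create_wiki()
    print list(wiki.getPageUrls())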
--- a/wikked/wiki.py Sat Jan 26 22:17:51 2013 -0800
+++ b/wikked/wiki.py Mon Jan 28 23:13:04 2013 -0800
@@ -8,94 +8,129 @@
 import textile
 import creole
 from page import Page, DatabasePage
-from cache import Cache
 from fs import FileSystem
-from db import SQLiteDatabase
+from db import SQLiteDatabase, conn_scope
 from scm import MercurialSourceControl
 from indexer import WhooshWikiIndex
 from auth import UserManager
 
 
+def passthrough_formatter(text):
+    """ Passthrough formatter. Pretty simple stuff.
+    """
+    return text
+
+
 class InitializationError(Exception):
+    """ An exception that can get raised while the wiki gets
+    initialized. """
     pass
 
 
+class WikiParameters(object):
+    """ An object that defines how a wiki gets initialized.
+    """
+    def __init__(self, root=None):
+        if root is None:
+            root = os.getcwd()
+        self.root = root
+
+        self.formatters = {
+            markdown.markdown: ['md', 'mdown', 'markdown'],
+            textile.textile: ['tl', 'text', 'textile'],
+            creole.creole2html: ['cr', 'creole'],
+            passthrough_formatter: ['txt', 'html']
+        }
+        self.config_path = os.path.join(self.root, '.wikirc')
+        self.index_path = os.path.join(self.root, '.wiki', 'index')
+        self.db_path = os.path.join(self.root, '.wiki', 'wiki.db')
+
+        self.page_factory = DatabasePage.factory
+
+    def logger_factory(self):
+        if getattr(self, 'logger', None):
+            return self.logger
+        return logging.getLogger('wikked.wiki')
+
+    def config_factory(self):
+        return open(self.config_path)
+
+    def fs_factory(self, config):
+        return FileSystem(self.root, slugify=Page.title_to_url, logger=self.logger_factory())
+
+    def index_factory(self, config):
+        return WhooshWikiIndex(self.index_path, logger=self.logger_factory())
+
+    def db_factory(self, config):
+        return SQLiteDatabase(self.db_path, logger=self.logger_factory())
+
+    def scm_factory(self, config):
+        scm_type = config.get('wiki', 'scm')
+        if scm_type == 'hg':
+            return MercurialSourceControl(self.root, logger=self.logger_factory())
+        else:
+            raise InitializationError("No such source control: " + scm_type)
+
+    def getSpecialFilenames(self):
+        yield self.config_path
+        yield os.path.join(self.root, '.wiki')
+
+
 class Wiki(object):
-    def __init__(self, root=None, logger=None):
-        if root is None:
-            root = os.getcwd()
+    """ The wiki class! This is where the magic happens.
+    """
+    def __init__(self, parameters):
+        """ Creates a new wiki instance. It won't be fully functional
+            until you call `start`, which does the actual initialization.
+            This gives you a chance to customize a few more things before
+            getting started.
+        """
+        if parameters is None:
+            raise ValueError("No parameters were given to the wiki.")
 
-        self.logger = logger
-        if logger is None:
-            self.logger = logging.getLogger('wikked.wiki')
-        self.logger.debug("Initializing wiki at: " + root)
+        self.logger = parameters.logger_factory()
+        self.logger.debug("Initializing wiki.")
+        self.config = self._loadConfig(parameters)
+
+        self.formatters = parameters.formatters
         self.page_factory = DatabasePage.factory
-        self.use_db = True
-        self.formatters = {
-            markdown.markdown: ['md', 'mdown', 'markdown'],
-            textile.textile: ['tl', 'text', 'textile'],
-            creole.creole2html: ['cr', 'creole'],
-            self._passthrough: ['txt', 'html']
-        }
-        self.default_config_path = os.path.join(
-            os.path.dirname(__file__), 'resources', 'defaults.cfg')
-        self.config_path = os.path.join(root, '.wikirc')
-        self.config = self._loadConfig()
+        self.fs = parameters.fs_factory(self.config)
+        self.index = parameters.index_factory(self.config)
+        self.db = parameters.db_factory(self.config)
+        self.scm = parameters.scm_factory(self.config)
 
-        self.fs = FileSystem(root, slugify=Page.title_to_url)
         self.auth = UserManager(self.config, logger=self.logger)
-        self.index = WhooshWikiIndex(os.path.join(root, '.wiki', 'index'),
-            logger=self.logger)
-        self.db = SQLiteDatabase(self, logger=self.logger)
-        self.scm = self._createScm()
-        self.cache = self._createJsonCache()
 
         self.fs.page_extensions = list(set(
             itertools.chain(*self.formatters.itervalues())))
-        self.fs.excluded.append(self.config_path)
-        self.fs.excluded.append(os.path.join(root, '.wiki'))
-        if self.scm is not None:
-            self.fs.excluded += self.scm.getSpecialDirs()
-
-    def _createScm(self):
-        scm_type = self.config.get('wiki', 'scm')
-        if scm_type == 'hg':
-            return MercurialSourceControl(self.fs.root, self.logger)
-        else:
-            raise InitializationError("No such source control: " + scm_type)
-
-    def _createJsonCache(self):
-        if (not self.config.has_option('wiki', 'cache') or
-                self.config.getboolean('wiki', 'cache')):
-            return Cache(os.path.join(self.fs.root, '.wiki', 'cache'))
-        else:
-            return None
-
-    def _loadConfig(self):
-        config = SafeConfigParser()
-        config.readfp(open(self.default_config_path))
-        config.read(self.config_path)
-        return config
+        self.fs.excluded += parameters.getSpecialFilenames()
+        self.fs.excluded += self.scm.getSpecialFilenames()
 
     def start(self, update=True):
-        if self.scm is not None:
-            self.scm.initRepo()
-        if self.index is not None:
-            self.index.initIndex()
-        if self.db is not None:
-            self.db.initDb()
+        """ Properly initializes the wiki and all its sub-systems.
+        """
+        self.scm.initRepo()
+        self.index.initIndex()
+        self.db.initDb()
 
         if update:
-            pass
+            with conn_scope(self.db):
+                self.db.update(self.getPages(from_db=False, factory=Page.factory))
+                self.index.update(self.getPages())
 
-    @property
-    def root(self):
-        return self.fs.root
+    def stop(self):
+        self.db.close()
 
     def getPageUrls(self, subdir=None, from_db=True):
-        if from_db and self.db:
+        """ Returns all the page URLs in the wiki, or in the given
+            sub-directory.
+            By default, it queries the DB, but it can query the file-system
+            directly if `from_db` is `False`.
+        """
+        if from_db:
             for url in self.db.getPageUrls(subdir):
                 yield url
         else:
@@ -103,17 +138,29 @@
                 yield info['url']
 
     def getPages(self, subdir=None, from_db=True, factory=None):
+        """ Gets all the pages in the wiki, or in the given sub-directory.
+            By default it will use the DB to fetch the list of pages, but it
+            can scan the file-system directly if `from_db` is `False`. If
+            that's the case, it's probably a good idea to provide a custom
+            `factory` for creating `Page` instances, since by default it will
+            use `DatabasePage` which also uses the DB to load its information.
+        """
         if factory is None:
             factory = self.page_factory
         for url in self.getPageUrls(subdir, from_db):
             yield factory(self, url)
 
     def getPage(self, url, factory=None):
+        """ Gets the page for a given URL.
+        """
         if factory is None:
             factory = self.page_factory
         return factory(self, url)
 
     def setPage(self, url, page_fields):
+        """ Updates or creates a page for a given URL.
+        """
+        # Validate the parameters.
         if 'author' not in page_fields:
             raise ValueError(
                 "No author specified for editing page '%s'." % url)
@@ -121,14 +168,14 @@
             raise ValueError(
                 "No commit message specified for editing page '%s'." % url)
 
+        # Save the new/modified text.
         do_commit = False
         path = self.fs.getPhysicalPagePath(url)
-        if 'text' in page_fields:
-            with open(path, 'w') as f:
-                f.write(page_fields['text'])
-            do_commit = True
+        self.fs.setPage(path, page_fields['text'])
+        do_commit = True
 
+        # Commit the file to the source-control.
        if do_commit:
             commit_meta = {
                 'author': page_fields['author'],
@@ -136,21 +183,37 @@
             }
             self.scm.commit([path], commit_meta)
 
-        if self.db is not None:
-            self.db.update([self.getPage(url)])
-        if self.index is not None:
-            self.index.update([self.getPage(url)])
+        # Update the DB and index with the new/modified page.
+        self.db.update([self.getPage(url)])
+        self.index.update([self.getPage(url)])
 
     def pageExists(self, url, from_db=True):
+        """ Returns whether a page exists at the given URL.
+            By default it will query the DB, but it can query the underlying
+            file-system directly if `from_db` is `False`.
+        """
         if from_db:
             return self.db.pageExists(url)
         return self.fs.pageExists(url)
 
     def getHistory(self):
+        """ Shorthand method to get the history from the source-control.
+        """
         return self.scm.getHistory()
 
-    def _passthrough(self, content):
-        return content
+    def _loadConfig(self, parameters):
+        # Merge the default settings with any settings provided by
+        # the parameters.
+        default_config_path = os.path.join(
+            os.path.dirname(__file__), 'resources', 'defaults.cfg')
+        config = SafeConfigParser()
+        config.readfp(open(default_config_path))
+
+        fp = parameters.config_factory()
+        config.readfp(fp)
+        fp.close()
+
+        return config
 
 
 def reloader_stat_loop(wiki, interval=1):
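The point of the `WikiParameters` split is that each sub-system can be swapped out without touching `Wiki` itself; a minimal sketch, assuming an existing wiki root (the path and the subclass name are hypothetical):

    # Hypothetical: override one factory to run the wiki against an
    # in-memory database, leaving everything else at its defaults.
    from wikked.db import SQLiteDatabase
    from wikked.wiki import Wiki, WikiParameters


    class InMemoryWikiParameters(WikiParameters):
        def db_factory(self, config):
            return SQLiteDatabase(':memory:', logger=self.logger_factory())


    wiki = Wiki(InMemoryWikiParameters('/path/to/wiki'))  # assumed root
    wiki.start()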