wikked: changeset 101:13249e5ca51c
Big refactor for better database caching:
- Using SQLAlchemy instead of raw SQLite.
- Better architecture and internal APIs.
- Fixed some issues where the database was not used correctly.
- Fixed some problems with querying pages.
Got rid of the `Makefile`; now using `grunt` for builds.
Now using a custom Bootstrap include file (`static/css/bootstrap.less`) that pulls in only the components we need.
| author | Ludovic Chabant <ludovic@chabant.com> |
| --- | --- |
| date | Tue, 05 Nov 2013 08:13:18 -0800 |
| parents | fd6eccb24882 |
| children | ea23c9483bc4 |
| files | .hgignore Gruntfile.js Makefile manage.py package.json static/bootstrap/less/custom-variables.less static/css/bootstrap.less static/css/custom-variables.less static/css/wikked.less static/js/wikked.js static/js/wikked/handlebars.js static/js/wikked/models.js static/tpl/category.html wikked/db.py wikked/fs.py wikked/page.py wikked/resolver.py wikked/templates/index.html wikked/views.py wikked/web.py wikked/wiki.py |
| diffstat | 21 files changed, 677 insertions(+), 499 deletions(-) |
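Seen from the outside, the heart of the refactor is that pages and metadata queries now always go through the SQLAlchemy-backed cache. A minimal sketch of the new read path, assuming a started wiki as set up in `wikked/web.py` (the page URL and category value are invented for illustration):

```python
# Illustrative only: 'sandbox' and 'recipes' are made-up example values.
from wikked.web import wiki   # module-level Wiki instance created in web.py

# Single pages come back as DatabasePage objects backed by the SQL cache.
page = wiki.getPage('sandbox')
print page.title

# Metadata queries are pushed down into SQL via the new `meta_query` argument.
for p in wiki.getPages(meta_query={'category': ['recipes']}):
    print p.url
```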
--- a/.hgignore Sat May 25 22:35:23 2013 -0700 +++ b/.hgignore Tue Nov 05 08:13:18 2013 -0800 @@ -1,5 +1,8 @@ syntax:glob venv +node_modules +build *.pyc *.pyo +*.sublime-*
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/Gruntfile.js Tue Nov 05 08:13:18 2013 -0800 @@ -0,0 +1,104 @@ +module.exports = function(grunt) { + + // Project configuration. + grunt.initConfig({ + pkg: grunt.file.readJSON('package.json'), + less: { + development: { + options: { + paths: ["static"] + }, + files: { + "build/css/wikked.min.css": "static/css/wikked.less" + } + }, + production: { + options: { + paths: ["static"], + compress: true + }, + files: { + "build/css/wikked.min.css": "static/css/wikked.less" + } + } + }, + requirejs: { + development: { + options: { + optimize: "none", + baseUrl: "static", + mainConfigFile: "static/js/wikked.js", + name: "js/wikked", + out: "build/js/wikked.min.js" + } + }, + production: { + options: { + optimize: "uglify", + baseUrl: "static", + mainConfigFile: "static/js/wikked.js", + name: "js/wikked", + out: "build/js/wikked.min.js" + } + } + }, + imagemin: { + all: { + files: [ + {expand: true, cwd: 'static/', dest: 'build/', src: ['img/*.{png,jpg,gif}']} + ] + } + }, + copy: { + images: { + files: [ + {expand: true, cwd: 'static/', dest: 'build/', src: ['img/**']} + ] + }, + production: { + files: [ + {expand: true, cwd: 'static/', dest: 'build/', src: ['js/require.js']}, + {expand: true, cwd: 'static/', dest: 'build/', src: ['css/*.css']}, + {expand: true, cwd: 'static/', dest: 'build/', src: ['font-awesome/font/**']} + ] + } + }, + jshint: { + all: ['static/js/wikked.js', 'static/js/wikked/**/*.js'], + gruntfile: ['Gruntfile.js'] + }, + watch: { + scripts: { + files: ['static/js/**/*.js'], + tasks: ['jshint', 'requirejs:development'] + }, + templates: { + files: ['static/tpl/**/*.html'], + tasks: ['requirejs:development'] + }, + styles: { + files: ['static/css/**/*.less'], + tasks: ['less:development'] + }, + gruntfile: { + files: ['Gruntfile.js'], + tasks: ['jshint:gruntfile'] + } + } + }); + + // Load plugins. + grunt.loadNpmTasks('grunt-contrib-less'); + grunt.loadNpmTasks('grunt-contrib-requirejs'); + grunt.loadNpmTasks('grunt-contrib-copy'); + grunt.loadNpmTasks('grunt-contrib-imagemin'); + grunt.loadNpmTasks('grunt-contrib-jshint'); + grunt.loadNpmTasks('grunt-contrib-watch'); + + // Default task(s). + grunt.registerTask('default', ['less:production', 'requirejs:production', 'imagemin:all', 'copy:production']); + + // Other tasks. + grunt.registerTask('dev', ['less:development', 'requirejs:development', 'copy:production', 'copy:images']); +}; +
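For reference, the new `Gruntfile.js` covers the old `make build`/`make watch` workflow: the `default` task is the production build (compressed LESS, uglified RequireJS bundle, optimized images, everything landing in `build/`), `grunt dev` builds uncompressed development assets, and the `watch` targets re-run JSHint, the RequireJS build and the LESS compile as scripts, templates and stylesheets change.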
--- a/Makefile Sat May 25 22:35:23 2013 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,26 +0,0 @@ - -WIKKED_LESS = ./static/css/wikked.less -WIKKED_CSS = ./static/css/wikked.min.css -REQUIREJS_BUILD = ./static/.rbuild.js - -DATE=$(shell date +%I:%M%p) -CHECK=\033[32m✔\033[39m - -build: - @echo "" - @echo "Building Wikked..." - @echo "" - @recess --compile --compress ${WIKKED_LESS} > ${WIKKED_CSS} - @echo "Compiling LESS stylesheets... ${CHECK} Done" - @r.js -o ${REQUIREJS_BUILD} - @echo "Compiling Javascript code... ${CHECK} Done" - @echo "" - @echo "Successfully compiled Wikked." - @echo "" - -clean: - rm ${WIKKED_CSS} - -watch: - recess ${WIKKED_LESS}:${WIKKED_CSS} --watch -
--- a/manage.py Sat May 25 22:35:23 2013 -0700 +++ b/manage.py Tue Nov 05 08:13:18 2013 -0800 @@ -6,8 +6,7 @@ # Create the app and the wiki. from wikked.web import app, wiki -from wikked.page import Page -from wikked.db import conn_scope +from wikked.page import FileSystemPage # Create the manager. from flask.ext.script import Manager, prompt, prompt_pass @@ -24,12 +23,12 @@ @manager.command -def new_user(): +def user(username=None, password=None): """Generates the entry for a new user so you can copy/paste it in your `.wikirc`. """ - username = prompt('Username: ') - password = prompt_pass('Password: ') + username = username or prompt('Username: ') + password = password or prompt_pass('Password: ') password = app.bcrypt.generate_password_hash(password) print "[users]" print "%s = %s" % (username, password) @@ -39,9 +38,10 @@ def reset(): """ Re-generates the database and the full-text-search index. """ - with conn_scope(wiki.db): - wiki.db.reset(wiki.getPages(from_db=False, factory=Page.factory)) - wiki.index.reset(wiki.getPages()) + page_infos = wiki.fs.getPageInfos() + fs_pages = FileSystemPage.fromPageInfos(wiki, page_infos) + wiki.db.reset(fs_pages) + wiki.index.reset(wiki.getPages()) @manager.command @@ -49,9 +49,10 @@ """ Updates the database and the full-text-search index with any changed/new files. """ - with conn_scope(wiki.db): - wiki.db.update(wiki.getPages(from_db=False, factory=Page.factory)) - wiki.index.update(wiki.getPages()) + page_infos = wiki.fs.getPageInfos() + fs_pages = FileSystemPage.fromPageInfos(wiki, page_infos) + wiki.db.update(fs_pages) + wiki.index.update(wiki.getPages()) @manager.command @@ -66,9 +67,8 @@ def get(url): """ Gets a page that matches the given URL. """ - with conn_scope(wiki.db): - page = wiki.getPage(url) - print page.text + page = wiki.getPage(url) + print page.text if __name__ == "__main__":
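The `reset` and `update` commands now build `FileSystemPage` objects from raw page infos instead of going through `conn_scope` and `Page.factory`. A sketch of the rebuild flow, using the same calls as the new `reset` command:

```python
# Sketch of the cache rebuild done by `manage.py reset`.
from wikked.web import wiki
from wikked.page import FileSystemPage

page_infos = wiki.fs.getPageInfos()                        # scan the file-system
fs_pages = FileSystemPage.fromPageInfos(wiki, page_infos)  # wrap them in lazy page objects
wiki.db.reset(fs_pages)                                    # re-create the SQL cache
wiki.index.reset(wiki.getPages())                          # re-create the full-text index
```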
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/package.json Tue Nov 05 08:13:18 2013 -0800 @@ -0,0 +1,17 @@ +{ + "name": "Wikked", + "version": "0.1.0", + "dependencies": {}, + "devDependencies": { + "grunt": "~0.4.1", + "grunt-contrib-less": "~0.8.1", + "grunt-contrib-requirejs": "~0.4.1", + "grunt-contrib-copy": "~0.4.1", + "grunt-contrib-imagemin": "~0.3.0", + "grunt-contrib-jshint": "~0.7.1", + "grunt-contrib-watch": "~0.5.3" + }, + "engines": { + "node": ">=0.8.0" + } +}
--- a/static/bootstrap/less/custom-variables.less Sat May 25 22:35:23 2013 -0700 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -// -// Custom Variables -// (this file isn't part of Bootstrap) -// -------------------------------------------------- - -// Bootstrap overrides -// ------------------------- -@bodyBackground: @white; -@textColor: @grayDark; - -//@monoFontFamily: "Lucida Console", Monaco, Consolas, "Courier New", monospace; - -@baseFontSize: 18px; -@baseLineHeight: (@baseFontSize * 1.5); - -//@linkColor: rgb(128, 128, 128); -//@linkColorHover: rgb(96, 96, 96); -
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/static/css/bootstrap.less Tue Nov 05 08:13:18 2013 -0800 @@ -0,0 +1,37 @@ +/* + * Custom includes for Bootstrap, to inject custom variables and skip + * the components we don't care about. + */ + +// Core variables and mixins +@import "../bootstrap/less/variables.less"; // Modify this for custom colors, font-sizes, etc +@import "custom-variables.less"; +@import "../bootstrap/less/mixins.less"; + +// CSS Reset +@import "../bootstrap/less/reset.less"; + +// Grid system and page structure +@import "../bootstrap/less/scaffolding.less"; +@import "../bootstrap/less/grid.less"; +@import "../bootstrap/less/layouts.less"; + +// Base CSS +@import "../bootstrap/less/type.less"; +@import "../bootstrap/less/code.less"; +@import "../bootstrap/less/forms.less"; +@import "../bootstrap/less/tables.less"; + +// Components: Buttons & Alerts +@import "../bootstrap/less/buttons.less"; +@import "../bootstrap/less/button-groups.less"; +@import "../bootstrap/less/alerts.less"; // Note: alerts share common CSS with buttons and thus have styles in buttons.less + +// Components: Popovers +@import "../bootstrap/less/modals.less"; +@import "../bootstrap/less/tooltip.less"; +@import "../bootstrap/less/popovers.less"; + +// Utility classes +@import "../bootstrap/less/utilities.less"; // Has to be last to override when necessary +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/static/css/custom-variables.less Tue Nov 05 08:13:18 2013 -0800 @@ -0,0 +1,18 @@ +// +// Custom Variables +// (this file isn't part of Bootstrap) +// -------------------------------------------------- + +// Bootstrap overrides +// ------------------------- +@bodyBackground: @white; +@textColor: @grayDark; + +//@monoFontFamily: "Lucida Console", Monaco, Consolas, "Courier New", monospace; + +@baseFontSize: 18px; +@baseLineHeight: (@baseFontSize * 1.5); + +//@linkColor: rgb(128, 128, 128); +//@linkColorHover: rgb(96, 96, 96); +
--- a/static/css/wikked.less Sat May 25 22:35:23 2013 -0700 +++ b/static/css/wikked.less Tue Nov 05 08:13:18 2013 -0800 @@ -1,7 +1,7 @@ // Includes -@import "/bootstrap/less/bootstrap.less"; -@import "/css/syntax.css"; -@import "/css/wmd.css"; +@import "bootstrap.less"; +@import "syntax.css"; +@import "wmd.css"; // Constants @colorBlue: #0F1FFF;
--- a/static/js/wikked.js Sat May 25 22:35:23 2013 -0700 +++ b/static/js/wikked.js Tue Nov 05 08:13:18 2013 -0800 @@ -12,8 +12,8 @@ handlebars: 'js/handlebars-1.0.rc.1', moment: 'js/moment.min', text: 'js/text', - bootstrap_modal: '/bootstrap/js/bootstrap-modal', - bootstrap_tooltip: '/bootstrap/js/bootstrap-tooltip' + bootstrap_modal: 'bootstrap/js/bootstrap-modal', + bootstrap_tooltip: 'bootstrap/js/bootstrap-tooltip' }, shim: { 'jquery': {
--- a/static/js/wikked/handlebars.js Sat May 25 22:35:23 2013 -0700 +++ b/static/js/wikked/handlebars.js Tue Nov 05 08:13:18 2013 -0800 @@ -8,7 +8,7 @@ function(Handlebars) { /** - * Handlebars helper: reverse iterator. + * Reverse iterator. */ Handlebars.registerHelper('eachr', function(context, options) { if (context === undefined) { @@ -30,20 +30,6 @@ }); /** - * - */ - Handlebars.registerHelper('each_or_this', function(context, options) { - if (context === undefined) { - return ''; - } - data = undefined; - if (options.data) { - data = Handlebars.createFrame(options.data); - } - var out = ''; - }); - - /** * Would you believe Handlebars doesn't have an equality * operator? */ @@ -71,6 +57,29 @@ }); /** + * Concatenate strings with a separator. + */ + Handlebars.registerHelper('concat', function(context, options) { + if (context === undefined) { + return ''; + } + data = undefined; + if (options.data) { + data = Handlebars.createFrame(options.data); + } + + var sep = options.hash.sep; + var out = ''; + for (var i = 0; i < context.length; i++) { + if (i > 0) { + out += sep; + } + out += options.fn(context[i], { data: data }); + } + return out; + }); + + /** * Format dates. */ Handlebars.registerHelper('date', function(timestamp, options) {
--- a/static/js/wikked/models.js Sat May 25 22:35:23 2013 -0700 +++ b/static/js/wikked/models.js Tue Nov 05 08:13:18 2013 -0800 @@ -245,10 +245,6 @@ action: 'read', url: function() { return '/api/query?category=' + this.get('path'); - }, - _onChangePath: function(path) { - CategoryModel.__super__._onChangePath.apply(this, arguments); - this.set('category', path); } });
--- a/static/tpl/category.html Sat May 25 22:35:23 2013 -0700 +++ b/static/tpl/category.html Tue Nov 05 08:13:18 2013 -0800 @@ -1,13 +1,13 @@ <article> <header> - <h1>{{category}} <span class="decorator">Category</span></h1> + <h1>{{query.category}} <span class="decorator">Category</span></h1> </header> <section> {{content}} - <h2>Pages in category "{{category}}"</h2> + <h2>Pages in category "{{query.category}}"</h2> <ul class="list-category"> {{#each pages}} - <li><a href="{{url_read}}">{{title}}</a></li> + <li><a href="/#/read/{{url}}">{{title}}</a></li> {{/each}} </ul> </section>
--- a/wikked/db.py Sat May 25 22:35:23 2013 -0700 +++ b/wikked/db.py Tue Nov 05 08:13:18 2013 -0800 @@ -4,25 +4,11 @@ import string import logging import datetime -import sqlite3 - - -class conn_scope(object): - """ Helper class, disguised as a function, to ensure the database - has been opened before doing something. If the database wasn't - open, it will be closed after the operation. - """ - def __init__(self, db): - self.db = db - self.do_close = False - - def __enter__(self): - self.do_close = (self.db.conn is None) - self.db.open() - - def __exit__(self, type, value, traceback): - if self.do_close: - self.db.close() +from sqlalchemy import ( + create_engine, and_, + Column, Boolean, Integer, String, Text, DateTime, ForeignKey) +from sqlalchemy.orm import sessionmaker, relationship, backref +from wikked.web import db class Database(object): @@ -51,7 +37,7 @@ def getPageUrls(self, subdir=None): raise NotImplementedError() - def getPages(self, subdir=None): + def getPages(self, subdir=None, meta_query=None): raise NotImplementedError() def getPage(self, url): @@ -64,31 +50,109 @@ raise NotImplementedError() -class SQLitePageInfo(object): - def __init__(self, row): - self.url = row['url'] - self.path = row['path'] - self.time = row['time'] - self.title = row['title'] - self.raw_text = row['raw_text'] - self.formatted_text = row['formatted_text'] - self.links = [] - self.meta = {} +Base = db.Model + +class SQLPage(Base): + __tablename__ = 'pages' + + id = Column(Integer, primary_key=True) + time = Column(DateTime) + url = Column(Text) + path = Column(Text) + title = Column(Text) + raw_text = Column(Text) + formatted_text = Column(Text) + + meta = relationship('SQLMeta', order_by='SQLMeta.id', + backref=backref('page'), + cascade='all, delete, delete-orphan') + links = relationship('SQLLink', order_by='SQLLink.id', + backref=backref('source'), + cascade='all, delete, delete-orphan') + + ready_text = Column(Text) + is_ready = Column(Boolean) + + ready_meta = relationship('SQLReadyMeta', order_by='SQLReadyMeta.id', + backref=backref('page'), + cascade='all, delete, delete-orphan') + ready_links = relationship('SQLReadyLink', order_by='SQLReadyLink.id', + backref=backref('source'), + cascade='all, delete, delete-orphan') + + +class SQLMeta(Base): + __tablename__ = 'meta' + + id = Column(Integer, primary_key=True) + page_id = Column(Integer, ForeignKey('pages.id')) + name = Column(String(128)) + value = Column(Text) + + def __init__(self, name=None, value=None): + self.name = name + self.value = value -class SQLiteDatabase(Database): - """ A database cache based on SQLite. 
+class SQLReadyMeta(Base): + __tablename__ = 'ready_meta' + + id = Column(Integer, primary_key=True) + page_id = Column(Integer, ForeignKey('pages.id')) + name = Column(String(128)) + value = Column(Text) + + def __init__(self, name=None, value=None): + self.name = name + self.value = value + + +class SQLLink(Base): + __tablename__ = 'links' + + id = Column(Integer, primary_key=True) + source_id = Column(Integer, ForeignKey('pages.id')) + target_url = Column(Text) + + def __init__(self, target_url=None): + self.target_url = target_url + + +class SQLReadyLink(Base): + __tablename__ = 'ready_links' + + id = Column(Integer, primary_key=True) + source_id = Column(Integer, ForeignKey('pages.id')) + target_url = Column(Text) + + def __init__(self, target_url=None): + self.target_url = target_url + + +class SQLInfo(Base): + __tablename__ = 'info' + + id = Column(Integer, primary_key=True) + name = Column(String(64)) + str_value = Column(String(256)) + int_value = Column(Integer) + time_value = Column(DateTime) + + +class SQLDatabase(Database): + """ A database cache based on SQL. """ - schema_version = 1 + schema_version = 3 def __init__(self, db_path, logger=None): Database.__init__(self, logger) self.db_path = db_path - self.conn = None + self.engine = create_engine(db_path, echo=True) + self.session_class = sessionmaker(bind=self.engine) def initDb(self): create_schema = False - if self.db_path != ':memory:': + if self.db_path != 'sqlite:///:memory:': if not os.path.isdir(os.path.dirname(self.db_path)): # No database on disk... create one. self.logger.debug("Creating SQL database.") @@ -102,250 +166,170 @@ else: create_schema = True if create_schema: - with conn_scope(self): - self._createSchema() + self._createSchema() def open(self): - if self.conn is None: - self.logger.debug("Opening connection") - self.conn = sqlite3.connect(self.db_path, - detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES) - self.conn.row_factory = sqlite3.Row + self.logger.debug("Opening connection") def close(self): - if self.conn is not None: - self.logger.debug("Closing connection") - self.conn.close() - self.conn = None + self.logger.debug("Closing connection") def reset(self, pages): self.logger.debug("Re-creating SQL database.") - with conn_scope(self): - self._createSchema() - c = self.conn.cursor() - for page in pages: - self._addPage(page, c) - self.conn.commit() + db.drop_all() + db.create_all() + for page in pages: + self._addPage(page) + db.session.commit() def update(self, pages): self.logger.debug("Updating SQL database...") to_update = set() already_added = set() + to_remove = [] - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT id, time, path FROM pages''') - for r in c.fetchall(): - if not os.path.isfile(r['path']): - # File was deleted. - self._removePage(r['id'], c) - else: - already_added.add(r['path']) - path_time = datetime.datetime.fromtimestamp( - os.path.getmtime(r['path'])) - if path_time > r['time']: - # File has changed since last index. - self._removePage(r['id'], c) - to_update.add(r['path']) - self.conn.commit() + db_pages = SQLPage.query.all() + for p in db_pages: + if not os.path.isfile(p.path): + # File was deleted. + to_remove.append(p) + else: + already_added.add(p.path) + path_time = datetime.datetime.fromtimestamp( + os.path.getmtime(p.path)) + if path_time > p.time: + # File has changed since last index. 
+ to_remove.append(p) + to_update.add(p.path) + for p in to_remove: + self._removePage(p) - for page in pages: - if (page.path in to_update or - page.path not in already_added): - self._addPage(page, c) + self.session.commit() - self.conn.commit() - self.logger.debug("...done updating SQL database.") + added_db_objs = [] + for p in pages: + if (p.path in to_update or + p.path not in already_added): + added_db_objs.append(self._addPage(p)) + + self.session.commit() + self.logger.debug("...done updating SQL database.") + + return [o.id for o in added_db_objs] def getPageUrls(self, subdir=None): - with conn_scope(self): - c = self.conn.cursor() - if subdir: - subdir = string.rstrip(subdir, '/') + '/%' - c.execute('''SELECT url FROM pages WHERE url LIKE ?''', - (subdir,)) - else: - c.execute('''SELECT url FROM pages''') - urls = [] - for row in c.fetchall(): - urls.append(row['url']) - return urls + q = db.session.query(SQLPage) + if subdir: + subdir = string.rstrip(subdir, '/') + '/%' + q = q.filter(SQLPage.url.like(subdir)) + urls = [] + for p in q.all(): + urls.append(p.url) + return urls - def getPages(self, subdir=None): - with conn_scope(self): - c = self.conn.cursor() - if subdir: - subdir = string.rstrip(subdir, '/') + '/%' - c.execute('''SELECT id, url, path, time, title, raw_text, - formatted_text - FROM pages WHERE url LIKE ?''', - (subdir,)) - else: - c.execute('''SELECT id, url, path, time, title, raw_text, - formatted_text - FROM pages''') - pages = [] - for row in c.fetchall(): - pages.append(self._getPage(row, c)) - return pages + def getPages(self, subdir=None, meta_query=None): + q = db.session.query(SQLPage) + if meta_query: + q = q.join(SQLReadyMeta) + for name, values in meta_query.iteritems(): + for v in values: + q = q.filter(and_(SQLReadyMeta.name == name, SQLReadyMeta.value == v)) + if subdir: + subdir = string.rstrip(subdir, '/') + '/%' + q = q.filter(SQLPage.url.like(subdir)) + pages = [] + for p in q.all(): + pages.append(p) + return pages def getPage(self, url): - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT id, url, path, time, title, raw_text, - formatted_text FROM pages WHERE url=?''', (url,)) - row = c.fetchone() - if row is None: - return None - return self._getPage(row, c) + q = db.session.query(SQLPage).filter_by(url=url) + page = q.first() + return page def pageExists(self, url): - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT id FROM pages WHERE url=?''', (url,)) - return c.fetchone() is not None + return self.getPage(url) is not None def getLinksTo(self, url): - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT source FROM links WHERE target=?''', (url,)) - sources = [] - for r in c.fetchall(): - sources.append(r['source']) - return sources + q = db.session.query(SQLLink, SQLPage).\ + filter(SQLLink.target_url == SQLPage.url).\ + all() + for l in q: + yield l.source def _createSchema(self): - self.logger.debug("Creating SQL schema...") - c = self.conn.cursor() - c.execute('''DROP TABLE IF EXISTS pages''') - c.execute('''CREATE TABLE pages - (id INTEGER PRIMARY KEY AUTOINCREMENT, - time TIMESTAMP, - url TEXT, - path TEXT, - title TEXT, - raw_text TEXT, - formatted_text TEXT)''') - c.execute('''DROP TABLE IF EXISTS links''') - c.execute('''CREATE TABLE links - (id INTEGER PRIMARY KEY AUTOINCREMENT, - source TEXT, - target TEXT)''') - c.execute('''DROP TABLE IF EXISTS meta''') - c.execute('''CREATE TABLE meta - (id INTEGER PRIMARY KEY AUTOINCREMENT, - page_id INTEGER, - name TEXT, - value 
TEXT)''') - c.execute('''DROP TABLE IF EXISTS info''') - c.execute('''CREATE TABLE info - (name TEXT UNIQUE NOT NULL, - str_value TEXT, - int_value INTEGER, - time_value TIMESTAMP)''') - c.execute('''INSERT INTO info (name, int_value) VALUES (?, ?)''', - ('schema_version', self.schema_version)) - self.conn.commit() + db.create_all() - def _getInfo(self, name, default=None): - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT name, str_value FROM info - WHERE name=?''', (name,)) - row = c.fetchone() - if row is None: - return default - return row['str_value'] - - def _getInfoInt(self, name, default=None): - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT name, int_value FROM info - WHERE name=?''', (name,)) - row = c.fetchone() - if row is None: - return default - return row['int_value'] - - def _getInfoTime(self, name, default=None): - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT name, time_value FROM info - WHERE name=?''', (name,)) - row = c.fetchone() - if row is None: - return default - return row['time_value'] + ver = SQLInfo() + ver.name = 'schema_version' + ver.int_value = self.schema_version + db.session.add(ver) + db.session.commit() def _getSchemaVersion(self): - with conn_scope(self): - c = self.conn.cursor() - c.execute('''SELECT name FROM sqlite_master - WHERE type="table" AND name="info"''') - if c.fetchone() is None: - return 0 - c.execute('''SELECT int_value FROM info - WHERE name="schema_version"''') - row = c.fetchone() - if row is None: - return 0 - return row[0] + q = db.session.query(SQLInfo).\ + filter(SQLInfo.name == 'schema_version').\ + first() + if q is None: + return 0 + return q.int_value + + def _addPage(self, page): + self.logger.debug("Adding page '%s' to SQL database." % page.url) - def _addPage(self, page, c): - self.logger.debug("Adding page '%s' to SQL database." % page.url) - now = datetime.datetime.now() - c.execute('''INSERT INTO pages - (time, url, path, title, raw_text, formatted_text) - VALUES (?, ?, ?, ?, ?, ?)''', - (now, page.url, page.path, page.title, - page.raw_text, page._getFormattedText())) - page_id = c.lastrowid + po = SQLPage() + po.time = datetime.datetime.now() + po.url = page.url + po.path = page.path + po.title = page.title + po.raw_text = page.raw_text + po.formatted_text = page.getFormattedText() + po.ready_text = None + po.is_ready = False - for name, value in page._getLocalMeta().iteritems(): + for name, value in page.getLocalMeta().iteritems(): if isinstance(value, bool): value = "" if isinstance(value, types.StringTypes): - c.execute('''INSERT INTO meta - (page_id, name, value) VALUES (?, ?, ?)''', - (page_id, name, value)) + po.meta.append(SQLMeta(name, value)) else: for v in value: - c.execute('''INSERT INTO meta - (page_id, name, value) VALUES (?, ?, ?)''', - (page_id, name, v)) + po.meta.append(SQLMeta(name, v)) - for link_url in page._getLocalLinks(): - c.execute('''INSERT INTO links - (source, target) VALUES (?, ?)''', - (page.url, link_url)) + for link_url in page.getLocalLinks(): + po.links.append(SQLLink(link_url)) + + db.session.add(po) - def _removePage(self, page_id, c): - c.execute('''SELECT url FROM pages WHERE id=?''', (page_id,)) - row = c.fetchone() - self.logger.debug("Removing page '%s' [%d] from SQL database." 
% - (row['url'], page_id)) - c.execute('''DELETE FROM pages WHERE id=?''', (page_id,)) - c.execute('''DELETE FROM meta WHERE page_id=?''', (page_id,)) - c.execute('''DELETE FROM links WHERE source=?''', (row['url'],)) + return po + + def _cacheExtendedData(self, page): + self.logger.debug("Caching extended data for page '%s' [%d]." % (page.url, page._id)) + + if not hasattr(page, '_id') or not page._id: + raise Exception("Given page '%s' has no `_id` attribute set." % page.url) + db_obj = db.session.query(SQLPage).filter(SQLPage.id == page._id).one() - def _getPage(self, row, c): - db_page = SQLitePageInfo(row) - - c.execute('''SELECT target FROM links - WHERE source=?''', (row['url'],)) - for r in c.fetchall(): - db_page.links.append(r['target']) + db_obj.ready_text = page._data.text + db_obj.is_ready = True + + for name, value in page._data.ext_meta.iteritems(): + if isinstance(value, bool): + value = "" + if isinstance(value, types.StringTypes): + db_obj.ready_meta.append(SQLReadyMeta(name, value)) + else: + for v in value: + db_obj.ready_meta.append(SQLReadyMeta(name, v)) - c.execute('''SELECT page_id, name, value - FROM meta WHERE page_id=?''', (row['id'],)) - for r in c.fetchall(): - value = r['value'] - if value == '': - value = True - name = r['name'] - if name not in db_page.meta: - db_page.meta[name] = [value] - else: - db_page.meta[name].append(value) + for link_url in page._data.ext_links: + db_obj.ready_links.append(SQLReadyLink(link_url)) + + db.session.commit() + - return db_page + def _removePage(self, page): + self.logger.debug("Removing page '%s' [%d] from SQL database." % + (page.url, page.id)) + self.session.remove(page) +
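With the schema expressed as SQLAlchemy models, a metadata query becomes a join against the `ready_meta` table instead of a Python-side filter over every page. A rough illustration of what `SQLDatabase.getPages()` builds for a single-value query (the category value is an invented example):

```python
# Roughly what getPages(meta_query={'category': ['recipes']}) executes.
from sqlalchemy import and_
from wikked.web import db
from wikked.db import SQLPage, SQLReadyMeta

q = db.session.query(SQLPage).join(SQLReadyMeta)
q = q.filter(and_(SQLReadyMeta.name == 'category',
                  SQLReadyMeta.value == 'recipes'))
pages = q.all()   # SQLPage rows whose cached ("ready") meta matches
```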
--- a/wikked/fs.py Sat May 25 22:35:23 2013 -0700 +++ b/wikked/fs.py Tue Nov 05 08:13:18 2013 -0800 @@ -15,14 +15,17 @@ class PageInfo(object): - def __init__(self, url, path, content=None): + def __init__(self, url, path): self.url = url self.path = path - self.content = content + self._content = None @property - def has_content(self): - return self.content is not None + def content(self): + if self._content is None: + with codecs.open(self.path, 'r', encoding='utf-8') as f: + self._content = f.read() + return self._content class FileSystem(object): @@ -65,13 +68,13 @@ def getPage(self, url): path = self.getPhysicalPagePath(url) - with codecs.open(path, 'r', encoding='utf-8') as f: - content = f.read() - return PageInfo(url, path, content) + return PageInfo(url, path) - def setPage(self, path, content): + def setPage(self, url, content): + path = self.getPhysicalPagePath(url) with codecs.open(path, 'w', encoding='utf-8') as f: f.write(content) + return PageInfo(url, path) def pageExists(self, url): try: @@ -102,7 +105,7 @@ if i > 0: url += '/' url += title_to_url(part) - return PageInfo(url, path, None) + return PageInfo(url, path) def _getPhysicalPath(self, url, is_file): if string.find(url, '..') >= 0:
--- a/wikked/page.py Sat May 25 22:35:23 2013 -0700 +++ b/wikked/page.py Tue Nov 05 08:13:18 2013 -0800 @@ -7,6 +7,12 @@ from resolver import PageResolver, CircularIncludeError +class PageLoadingError(Exception): + """ An exception that can get raised if a page can't be loaded. + """ + pass + + class PageData(object): def __init__(self): self.path = None @@ -22,7 +28,8 @@ class Page(object): - """ A wiki page. + """ A wiki page. This is a non-functional class, as it doesn't know where + to load things from. Use `FileSystemPage` or `DatabasePage` instead. """ def __init__(self, wiki, url): self.wiki = wiki @@ -84,15 +91,15 @@ def getDiff(self, rev1, rev2): return self.wiki.scm.diff(self.path, rev1, rev2) - def _getFormattedText(self): + def getFormattedText(self): self._ensureData() return self._data.formatted_text - def _getLocalMeta(self): + def getLocalMeta(self): self._ensureData() return self._data.local_meta - def _getLocalLinks(self): + def getLocalLinks(self): self._ensureData() return self._data.local_links @@ -100,24 +107,81 @@ if self._data is not None: return - self._data = self._loadCachedData() + self._data = self._loadData() if self._data is not None: return - self._data = self._loadOriginalData() - self._saveCachedData(self._data) + raise PageLoadingError() + + def _loadData(self): + raise NotImplementedError() - def _loadCachedData(self): - return None + def _onExtendedDataLoading(self): + pass - def _saveCachedData(self, meta): + def _onExtendedDataLoaded(self): pass - def _loadOriginalData(self): - data = PageData() + def _ensureExtendedData(self): + if self._data is not None and self._data.has_extended_data: + return + + self._ensureData() + self._onExtendedDataLoading() + if self._data.has_extended_data: + return + + try: + r = PageResolver(self) + out = r.run() + self._data.text = out.text + self._data.ext_meta = out.meta + self._data.ext_links = out.out_links + self._data.has_extended_data = True + self._onExtendedDataLoaded() + except CircularIncludeError as cie: + template_path = os.path.join( + os.path.dirname(__file__), + 'templates', + 'circular_include_error.html' + ) + with open(template_path, 'r') as f: + env = jinja2.Environment() + template = env.from_string(f.read()) + self._data.text = template.render({ + 'message': str(cie), + 'url_trail': cie.url_trail + }) + + +class FileSystemPage(Page): + """ A page that can load its properties directly from the file-system. + """ + def __init__(self, wiki, url=None, page_info=None): + if url and page_info: + raise Exception("You can't specify both an url and a page info.") + if not url and not page_info: + raise Exception("You need to specify either a url or a page info.") + + super(FileSystemPage, self).__init__(wiki, url or page_info.url) + self._page_info = page_info + + @property + def path(self): + if self._page_info: + return self._page_info.path + return super(FileSystemPage, self).path + + def _loadData(self): # Get info from the file-system. 
- page_info = self.wiki.fs.getPage(self.url) + page_info = self._page_info or self.wiki.fs.getPage(self.url) + data = self._loadFromPageInfo(page_info) + self._page_info = None + return data + + def _loadFromPageInfo(self, page_info): + data = PageData() data.path = page_info.path data.raw_text = page_info.content split = os.path.splitext(data.path) @@ -140,76 +204,93 @@ return data - def _ensureExtendedData(self): - if self._data is not None and self._data.has_extended_data: - return - - self._ensureData() - try: - r = PageResolver(self) - out = r.run() - self._data.text = out.text - self._data.ext_meta = out.meta - self._data.ext_links = out.out_links - self._data.has_extended_data = True - except CircularIncludeError as cie: - template_path = os.path.join( - os.path.dirname(__file__), - 'templates', - 'circular_include_error.html' - ) - with open(template_path, 'r') as f: - env = jinja2.Environment() - template = env.from_string(f.read()) - self._data.text = template.render({ - 'message': str(cie), - 'url_trail': cie.url_trail - }) - @staticmethod - def factory(wiki, url): - return Page(wiki, url) + def fromPageInfos(wiki, page_infos): + for p in page_infos: + yield FileSystemPage(wiki, page_info=p) class DatabasePage(Page): - """ A page that can load its properties from a - database. + """ A page that can load its properties from a database. """ - def __init__(self, wiki, url): - Page.__init__(self, wiki, url) - if getattr(wiki, 'db', None) is None: - raise Exception("The wiki doesn't have a database.") + def __init__(self, wiki, url=None, db_obj=None): + if url and db_obj: + raise Exception("You can't specify both an url and a database object.") + if not url and not db_obj: + raise Exception("You need to specify either a url or a database object.") + + super(DatabasePage, self).__init__(wiki, url or db_obj.url) self.auto_update = wiki.config.get('wiki', 'auto_update') + self._db_obj = db_obj + + @property + def path(self): + if self._db_obj: + return self._db_obj.path + return super(DatabasePage, self).path + + @property + def _id(self): + if self._db_obj: + return self._db_obj.id + self._ensureData() + return self._data._db_id - def _loadCachedData(self): - if self.wiki.db is None: - return None - db_page = self.wiki.db.getPage(self.url) - if db_page is None: - return None - if self.auto_update: + def _loadData(self): + db_obj = self._db_obj or self.wiki.db.getPage(self.url) + data = self._loadFromDbObject(db_obj) + self._db_obj = None + return data + + def _onExtendedDataLoaded(self): + self.wiki.db._cacheExtendedData(self) + + def _loadFromDbObject(self, db_obj, bypass_auto_update=False): + if not bypass_auto_update and self.auto_update: path_time = datetime.datetime.fromtimestamp( - os.path.getmtime(db_page.path)) - if path_time >= db_page.time: - return None + os.path.getmtime(db_obj.path)) + if path_time >= db_obj.time: + self.wiki.logger.debug( + "Updating database cache for page '%s'." 
% self.url) + fs_page = FileSystemPage(self.wiki, self.url) + fs_page._ensureData() + added_ids = self.wiki.db.update([fs_page]) + fs_page._data._db_id = added_ids[0] + return fs_page._data + data = PageData() - data.path = db_page.path + data._db_id = db_obj.id + data.path = db_obj.path split = os.path.splitext(data.path) data.filename = split[0] data.extension = split[1].lstrip('.') - data.title = db_page.title - data.raw_text = db_page.raw_text - data.formatted_text = db_page.formatted_text - data.local_meta = db_page.meta - data.local_links = db_page.links + data.title = db_obj.title + data.raw_text = db_obj.raw_text + data.formatted_text = db_obj.formatted_text + + data.local_meta = {} + for m in db_obj.meta: + value = data.local_meta.get(m.name) + if value is None: + data.local_meta[m.name] = [m.value] + else: + data.local_meta[m.name].append(m.value) + + data.local_links = [l.target_url for l in db_obj.links] + + if db_obj.is_ready: + # If we have extended cache data from the database, we might as + # well load it now too. + data.text = db_obj.ready_text + for m in db_obj.ready_meta: + value = data.ext_meta.get(m.name) + if value is None: + data.ext_meta[m.name] = [m.value] + else: + data.ext_meta[m.name].append(m.value) + data.ext_links = [l.target_url for l in db_obj.ready_links] + # Flag this data as completely loaded. + data.has_extended_data = True + return data - def _saveCachedData(self, meta): - if self.wiki.db is not None: - self.wiki.logger.debug( - "Updated database cache for page '%s'." % self.url) - self.wiki.db.update([self]) - - @staticmethod - def factory(wiki, url): - return DatabasePage(wiki, url)
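The old all-purpose `Page` class with `from_db` switches and factories is now split into two concrete loaders. A short sketch of the distinction (the URL is illustrative and assumes the page exists):

```python
from wikked.web import wiki
from wikked.page import FileSystemPage, DatabasePage

# Parses the source file directly; used when (re)building the cache.
fs_page = FileSystemPage(wiki, url='sandbox')
print fs_page.getLocalMeta()        # meta parsed straight from the file

# Loads everything from the SQL cache; with `auto_update` on, a stale row is
# transparently re-read from disk and the cached data refreshed.
db_page = DatabasePage(wiki, url='sandbox')
print db_page.getFormattedText()
```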
--- a/wikked/resolver.py Sat May 25 22:35:23 2013 -0700 +++ b/wikked/resolver.py Tue Nov 05 08:13:18 2013 -0800 @@ -44,8 +44,8 @@ self.meta = {} self.out_links = [] if page: - self.meta = dict(page._getLocalMeta()) - self.out_links = list(page._getLocalLinks()) + self.meta = dict(page.getLocalMeta()) + self.out_links = list(page.getLocalLinks()) def add(self, other): self.out_links = list(set(self.out_links + other.out_links)) @@ -76,7 +76,7 @@ def __init__(self, page, ctx=None, parameters=None): self.page = page - self.ctx = ctx + self.ctx = ctx or ResolveContext(page) self.parameters = parameters self.output = None self.env = None @@ -98,10 +98,6 @@ return self.output def _unsafeRun(self): - # Create the context object. - if not self.ctx: - self.ctx = ResolveContext(self.page) - # Create default parameters. if not self.parameters: self.parameters = { @@ -126,7 +122,7 @@ final_text = re.sub( r'<a class="wiki-link" data-wiki-url="(?P<url>[^"]+)">', repl1, - self.page._getFormattedText()) + self.page.getFormattedText()) # Resolve queries, includes, etc. def repl2(m): @@ -247,7 +243,7 @@ 'url': p.url, 'title': p.title } - tokens.update(p._getLocalMeta()) + tokens.update(p.getLocalMeta()) item_url, item_text = self._valueOrPageText(parameters['__item'], with_url=True) text += self._renderTemplate(item_text, tokens, error_url=item_url or self.page.url) text += self._valueOrPageText(parameters['__footer']) @@ -273,7 +269,7 @@ # meta properties. meta_keys.append('+' + name) for key in meta_keys: - actual = page._getLocalMeta().get(key) + actual = page.getLocalMeta().get(key) if (actual is not None and ((type(actual) is list and value in actual) or (actual == value))): @@ -289,7 +285,7 @@ else: include_meta_keys.append('__include') for key in include_meta_keys: - i = page._getLocalMeta().get(key) + i = page.getLocalMeta().get(key) if i is not None: if (type(i) is list): include_meta_values += i
--- a/wikked/templates/index.html Sat May 25 22:35:23 2013 -0700 +++ b/wikked/templates/index.html Tue Nov 05 08:13:18 2013 -0800 @@ -3,18 +3,19 @@ <head> <title>Wikked</title> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <link rel="stylesheet/less" type="text/css" href="/css/wikked.less" /> - <script src="/js/less-1.3.1.min.js" type="text/javascript"></script> + <link rel="stylesheet" type="text/css" href="/css/wikked.min.css" /> </head> <body> <div id="app" class="container"> </div> - <script type="text/javascript"> + <script src="/js/require.js"></script> + <script src="/js/wikked.min.js"></script> + <!--script type="text/javascript"> var require = { baseUrl: "/", deps: ["/js/wikked.js{{cache_bust}}"] }; </script> - <script src="/js/require.js"></script> + <script src="/js/require.js"></script--> </body> </html>
--- a/wikked/views.py Sat May 25 22:35:23 2013 -0700 +++ b/wikked/views.py Tue Nov 05 08:13:18 2013 -0800 @@ -1,4 +1,5 @@ import time +import urllib import os.path from flask import render_template, abort, request, g, jsonify from flask.ext.login import login_user, logout_user, current_user @@ -22,7 +23,7 @@ result = [] for item in category: result.append({ - 'url': title_to_url(item), + 'url': urllib.quote_plus(item), 'name': item }) return result @@ -38,7 +39,7 @@ Page.__init__(self, wiki, url) self._text = text - def _loadCachedData(self): + def _loadData(self): extension = self.wiki.config.get('wiki', 'default_extension') data = PageData() data.path = '__preview__.' + extension @@ -87,7 +88,7 @@ def get_page_meta(page, local_only=False): if local_only: - meta = dict(page._getLocalMeta()) + meta = dict(page.getLocalMeta()) else: meta = dict(page.meta) meta['title'] = page.title @@ -198,21 +199,11 @@ @app.route('/api/query') def api_query(): query = dict(request.args) - - def makelower(i): - return i.lower() - - def filterfunc(p): - for k, v in query.iteritems(): - if not k.lower() in p.meta: - return False - if v is None: - return True - intersect = set(map(makelower, v)).intersection(map(makelower, p.meta[k])) - return len(intersect) > 0 - - pages = filter(filterfunc, g.wiki.getPages()) - result = {'pages': [get_page_meta(p) for p in pages]} + pages = g.wiki.getPages(meta_query=query) + result = { + 'query': query, + 'pages': [get_page_meta(p) for p in pages] + } return make_auth_response(result) @@ -379,7 +370,7 @@ app.logger.error(" %s" % e) continue if run_queries: - page_queries = page._getLocalMeta().get('query') + page_queries = page.getLocalMeta().get('query') if page_queries is not None: pages_with_queries.append(page)
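As a result, `/api/query` no longer filters pages in Python: the query-string dictionary is handed to `wiki.getPages(meta_query=...)` so the filtering happens in SQL, and the JSON response now echoes the query back alongside the matching pages.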
--- a/wikked/web.py Sat May 25 22:35:23 2013 -0700 +++ b/wikked/web.py Tue Nov 05 08:13:18 2013 -0800 @@ -1,6 +1,6 @@ import os +import os.path from flask import Flask, abort, g -from wiki import Wiki, WikiParameters # Create the main app. app = Flask("wikked") @@ -21,7 +21,7 @@ app.wsgi_app = SharedDataMiddleware(app.wsgi_app, { '/': os.path.join( os.path.dirname(os.path.dirname(__file__)), - 'static'), + 'build'), '/files': os.path.join(wiki_root) }) @@ -43,19 +43,26 @@ # access to the context instance for the wiki. @app.before_request def before_request(): - wiki.db.open() g.wiki = wiki @app.teardown_request def teardown_request(exception): - wiki.db.close() + pass + + +# SQLAlchemy extension. +from flask.ext.sqlalchemy import SQLAlchemy +# TODO: get the path from the wiki parameters +app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(wiki_root, '.wiki', 'wiki.db') +db = SQLAlchemy(app) # Login extension. def user_loader(username): return g.wiki.auth.getUser(username) + from flask.ext.login import LoginManager login_manager = LoginManager() login_manager.init_app(app) @@ -69,6 +76,8 @@ # Create the wiki. +from wiki import Wiki, WikiParameters + def create_wiki(update_on_start=True): params = WikiParameters(root=wiki_root) params.logger = app.logger @@ -76,6 +85,7 @@ wiki.start(update_on_start) return wiki + wiki = create_wiki(bool(app.config.get('UPDATE_WIKI_ON_START')))
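Note that the Flask-SQLAlchemy extension is initialized here in `wikked/web.py`, and `wikked/db.py` imports the resulting `db` handle to declare its models; that is also why the `Wiki`/`WikiParameters` import is moved below the extension setup.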
--- a/wikked/wiki.py Sat May 25 22:35:23 2013 -0700 +++ b/wikked/wiki.py Tue Nov 05 08:13:18 2013 -0800 @@ -5,9 +5,9 @@ import itertools import importlib from ConfigParser import SafeConfigParser -from page import Page, DatabasePage +from page import DatabasePage, FileSystemPage from fs import FileSystem -from db import SQLiteDatabase, conn_scope +from db import SQLDatabase from scm import MercurialCommandServerSourceControl from indexer import WhooshWikiIndex from auth import UserManager @@ -37,15 +37,12 @@ self.config_path = os.path.join(self.root, '.wikirc') self.index_path = os.path.join(self.root, '.wiki', 'index') - self.db_path = os.path.join(self.root, '.wiki', 'wiki.db') - - self.use_db = True - self.page_factory = DatabasePage.factory + self.db_path = 'sqlite://' + os.path.join(self.root, '.wiki', 'wiki.db') def logger_factory(self): if getattr(self, 'logger', None): return self.logger - return logging.getLogger('wikked.wiki') + return logging.getLogger(__name__) def config_factory(self): return open(self.config_path) @@ -57,7 +54,7 @@ return WhooshWikiIndex(self.index_path, logger=self.logger_factory()) def db_factory(self, config): - return SQLiteDatabase(self.db_path, logger=self.logger_factory()) + return SQLDatabase(self.db_path, logger=self.logger_factory()) def scm_factory(self, config): scm_type = config.get('wiki', 'scm') @@ -104,8 +101,6 @@ self.config = self._loadConfig(parameters) self.formatters = parameters.formatters - self.use_db = parameters.use_db - self.page_factory = DatabasePage.factory self.fs = parameters.fs_factory(self.config) self.index = parameters.index_factory(self.config) @@ -127,49 +122,31 @@ self.db.initDb() if update: - with conn_scope(self.db): - self.db.update(self.getPages(from_db=False, factory=Page.factory)) - self.index.update(self.getPages()) + page_infos = self.fs.getPageInfos() + fs_pages = FileSystemPage.fromPageInfos(self, page_infos) + self.db.update(fs_pages) + self.index.update(self.getPages()) def stop(self): self.db.close() - def getPageUrls(self, subdir=None, from_db=None): + def getPageUrls(self, subdir=None): """ Returns all the page URLs in the wiki, or in the given sub-directory. - By default, it queries the DB, but it can query the file-system - directly if `from_db` is `False`. """ - if from_db is None: - from_db = self.use_db - if from_db: - for url in self.db.getPageUrls(subdir): - yield url - else: - for info in self.fs.getPageInfos(subdir): - yield info.url + for url in self.db.getPageUrls(subdir): + yield url - def getPages(self, subdir=None, from_db=None, factory=None): + def getPages(self, subdir=None, meta_query=None): """ Gets all the pages in the wiki, or in the given sub-directory. - By default it will use the DB to fetch the list of pages, but it - can scan the file-system directly if `from_db` is `False`. If - that's the case, it's probably a good idea to provide a custom - `factory` for creating `Page` instances, since by default it will - use `DatabasePage` which also uses the DB to load its information. """ - if from_db is None: - from_db = self.use_db - if factory is None: - factory = self.page_factory - for url in self.getPageUrls(subdir, from_db): - yield factory(self, url) + for page in self.db.getPages(subdir, meta_query): + yield DatabasePage(self, db_obj=page) - def getPage(self, url, factory=None): + def getPage(self, url): """ Gets the page for a given URL. 
""" - if factory is None: - factory = self.page_factory - return factory(self, url) + return DatabasePage(self, url) def setPage(self, url, page_fields): """ Updates or creates a page for a given URL. @@ -186,18 +163,18 @@ "No commit message specified for editing page '%s'." % url) # Save the new/modified text. - path = self.fs.getPhysicalPagePath(url) - self.fs.setPage(path, page_fields['text']) + page_info = self.fs.setPage(url, page_fields['text']) # Commit the file to the source-control. commit_meta = { 'author': page_fields['author'], 'message': page_fields['message'] } - self.scm.commit([path], commit_meta) + self.scm.commit([page_info.path], commit_meta) # Update the DB and index with the new/modified page. - self.db.update([self.getPage(url, factory=Page.factory)]) + fs_page = FileSystemPage(self, page_info=page_info) + self.db.update([fs_page]) self.index.update([self.getPage(url)]) def revertPage(self, url, page_fields): @@ -219,7 +196,7 @@ rev_text = self.scm.getRevision(path, page_fields['rev']) # Write to the file and commit. - self.fs.setPage(path, rev_text) + page_info = self.fs.setPage(url, rev_text) # Commit to source-control. commit_meta = { @@ -229,19 +206,14 @@ self.scm.commit([path], commit_meta) # Update the DB and index with the modified page. - self.db.update([self.getPage(url, factory=Page.factory)]) + fs_page = FileSystemPage(self, page_info=page_info) + self.db.update([fs_page]) self.index.update([self.getPage(url)]) - def pageExists(self, url, from_db=None): + def pageExists(self, url): """ Returns whether a page exists at the given URL. - By default it will query the DB, but it can query the underlying - file-system directly if `from_db` is `False`. """ - if from_db is None: - from_db = self.use_db - if from_db: - return self.db.pageExists(url) - return self.fs.pageExists(url) + return self.db.pageExists(url) def getHistory(self): """ Shorthand method to get the history from the source-control.