wikked: changeset 168:a71822a4beed
Refactoring to not use Flask, or only when needed:
- command-line utility now uses plain `argparse`.
- no Flask app is created unless we want to run a server.
- split all commands into sub-modules (see the sketch below).
- do more of the initialization work in `WikiParameters`.
- make `Wiki` initialization as cheap as possible.
- one `Wiki` instance is now created per request.
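For illustration, a minimal sketch of what a sub-command looks like under the new layout, using the `register_command` decorator and `WikkedCommand` base class added in `wikked/commands/base.py` below; the `hello` command itself is hypothetical and not part of this changeset:

import logging
from wikked.commands.base import WikkedCommand, register_command

logger = logging.getLogger(__name__)


@register_command
class HelloCommand(WikkedCommand):
    # Hypothetical example command, not part of this changeset.
    def __init__(self):
        super(HelloCommand, self).__init__()
        self.name = 'hello'
        self.description = "Prints the wiki root."

    def setupParser(self, parser):
        # Each command registers its arguments on its own argparse sub-parser.
        parser.add_argument('--upper',
                help="Print the root in upper case",
                action='store_true')

    def run(self, ctx):
        # `ctx` carries the parsed arguments and the per-invocation `Wiki`.
        root = ctx.wiki.root
        if ctx.args.upper:
            root = root.upper()
        logger.info(root)

The `wikked/witch.py` driver added below then picks such a command up through `command_classes` and dispatches to it via `_doRun`.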
author | Ludovic Chabant <ludovic@chabant.com>
date | Wed, 22 Jan 2014 21:39:02 -0800
parents | adc70e861804
children | f1003615a261
files | wikked/commands/__init__.py wikked/commands/base.py wikked/commands/manage.py wikked/commands/query.py wikked/commands/users.py wikked/commands/web.py wikked/db/sql.py wikked/fs.py wikked/scm/mercurial.py wikked/web.py wikked/wiki.py wikked/witch.py wk.py
diffstat | 12 files changed, 515 insertions(+), 226 deletions(-)
line diff
--- /dev/null  Thu Jan 01 00:00:00 1970 +0000
+++ b/wikked/commands/base.py  Wed Jan 22 21:39:02 2014 -0800
@@ -0,0 +1,39 @@
+
+
+command_classes = []
+
+
+def register_command(cls):
+    command_classes.append(cls)
+    return cls
+
+
+class WikkedCommand(object):
+    def __init__(self):
+        self.name = None
+        self.description = None
+        self.requires_wiki = True
+
+    def setupParser(self, parser):
+        raise NotImplementedError()
+
+    def run(self, ctx):
+        raise NotImplementedError()
+
+    def _doRun(self, ctx):
+        if ctx.wiki is None and self.requires_wiki:
+            raise Exception("No wiki found here.")
+        result = self.run(ctx)
+        if result is None:
+            result = 0
+        return result
+
+
+# Import the commands.
+# (this creates a PyLint warning but it's OK)
+# pylint: disable=unused-import
+import wikked.commands.manage
+import wikked.commands.query
+import wikked.commands.users
+import wikked.commands.web
+
--- /dev/null  Thu Jan 01 00:00:00 1970 +0000
+++ b/wikked/commands/manage.py  Wed Jan 22 21:39:02 2014 -0800
@@ -0,0 +1,64 @@
+import logging
+from wikked.commands.base import WikkedCommand, register_command
+
+
+logger = logging.getLogger(__name__)
+
+
+@register_command
+class ResetCommand(WikkedCommand):
+    def __init__(self):
+        super(ResetCommand, self).__init__()
+        self.name = 'reset'
+        self.description = ("Re-generates the database and the full-text "
+                "search index.")
+
+    def setupParser(self, parser):
+        parser.add_argument('--cache',
+                help="Re-cache all pages",
+                action='store_true')
+        parser.add_argument('--indexonly',
+                help="Only update the full-text search index",
+                action='store_true')
+
+    def run(self, ctx):
+        if ctx.args.indexonly:
+            ctx.wiki.index.reset(ctx.wiki.getPages())
+        else:
+            ctx.wiki.reset(cache_ext_data=ctx.args.cache)
+
+
+@register_command
+class UpdateCommand(WikkedCommand):
+    def __init__(self):
+        super(UpdateCommand, self).__init__()
+        self.name = 'update'
+        self.description = ("Updates the database and the full-text-search "
+                "index with any changed/new files.")
+
+    def setupParser(self, parser):
+        parser.add_argument('url',
+                help="The URL of a page to update specifically",
+                nargs='?')
+        parser.add_argument('--cache',
+                help="Re-cache all pages",
+                action='store_true')
+
+    def run(self, ctx):
+        ctx.wiki.update(ctx.args.url, cache_ext_data=ctx.args.cache)
+
+
+@register_command
+class CacheCommand(WikkedCommand):
+    def __init__(self):
+        super(CacheCommand, self).__init__()
+        self.name = 'cache'
+        self.description = ("Makes sure the extended cache is valid for the "
+                "whole wiki.")
+
+    def setupParser(self, parser):
+        pass
+
+    def run(self, ctx):
+        ctx.wiki._cachePages()
+
--- /dev/null  Thu Jan 01 00:00:00 1970 +0000
+++ b/wikked/commands/query.py  Wed Jan 22 21:39:02 2014 -0800
@@ -0,0 +1,109 @@
+import logging
+from wikked.commands.base import WikkedCommand, register_command
+
+
+logger = logging.getLogger(__name__)
+
+
+@register_command
+class ListCommand(WikkedCommand):
+    def __init__(self):
+        super(ListCommand, self).__init__()
+        self.name = 'list'
+        self.description = "Lists page names in the wiki."
+
+    def setupParser(self, parser):
+        parser.add_argument('--fs',
+                help="Lists pages by scanning the file-system directly",
+                action='store_true')
+
+    def run(self, ctx):
+        if ctx.args.fs:
+            for pi in ctx.wiki.fs.getPageInfos():
+                logger.info(pi.url)
+        else:
+            for url in ctx.wiki.db.getPageUrls():
+                logger.info(url)
+
+
+@register_command
+class GetCommand(WikkedCommand):
+    def __init__(self):
+        super(GetCommand, self).__init__()
+        self.name = 'get'
+        self.description = "Gets a page that matches the given URL."
+
+    def setupParser(self, parser):
+        parser.add_argument('url',
+                help="The URL of the page to get",
+                nargs=1)
+        parser.add_argument('--resolve',
+                help="Re-resolve the page's content",
+                action='store_true')
+        parser.add_argument('--rev',
+                help="The revision to get",
+                nargs=1)
+
+    def run(self, ctx):
+        page = ctx.wiki.getPage(ctx.args.url)
+        if ctx.args.force_resolve:
+            page._force_resolve = True
+        if ctx.args.rev is not None:
+            logger.info(page.getRevision(ctx.args.rev))
+            return
+        logger.info(page.text)
+
+
+@register_command
+class SearchCommand(WikkedCommand):
+    def __init__(self):
+        super(SearchCommand, self).__init__()
+        self.name = 'search'
+        self.description = "Searches the wiki."
+
+    def setupParser(self, parser):
+        parser.add_argument('query',
+                help="The search query",
+                nargs='+')
+
+    def run(self, ctx):
+        query = ' '.join(ctx.args.query)
+        hits = ctx.wiki.index.search(query)
+        logger.info(hits)
+
+
+@register_command
+class LinksFromCommand(WikkedCommand):
+    def __init__(self):
+        super(LinksFromCommand, self).__init__()
+        self.name = 'linksfrom'
+        self.description = "Gets the links going out from a given page."
+
+    def setupParser(self, parser):
+        parser.add_argument('url',
+                help="The page from which the links come from",
+                nargs=1)
+
+    def run(self, ctx):
+        page = ctx.wiki.getPage(ctx.args.url)
+        for l in page.links:
+            logger.info(l)
+
+
+@register_command
+class LinksToCommand(WikkedCommand):
+    def __init__(self):
+        super(LinksToCommand, self).__init__()
+        self.name = 'linksto'
+        self.description = "Gets the links going to a given page."
+
+    def setupParser(self, parser):
+        parser.add_argument('url',
+                help="The page to which the links go to",
+                nargs=1)
+
+    def run(self, ctx):
+        page = ctx.wiki.getPage(ctx.args.url)
+        for l in page.getIncomingLinks():
+            logger.info(l)
+
--- /dev/null  Thu Jan 01 00:00:00 1970 +0000
+++ b/wikked/commands/users.py  Wed Jan 22 21:39:02 2014 -0800
@@ -0,0 +1,44 @@
+import logging
+from flask.ext.script import prompt_pass
+from flask.ext.bcrypt import generate_password_hash
+from wikked.commands.base import WikkedCommand, register_command
+
+
+logger = logging.getLogger(__name__)
+
+
+@register_command
+class UsersCommand(WikkedCommand):
+    def __init__(self):
+        super(UsersCommand, self).__init__()
+        self.name = 'users'
+        self.description = "Lists users of this wiki."
+
+    def setupParser(self, parser):
+        pass
+
+    def run(self, ctx):
+        logger.info("Users:")
+        for user in ctx.wiki.auth.getUsers():
+            logger.info(" - " + user.username)
+
+
+@register_command
+class NewUserCommand(WikkedCommand):
+    def __init__(self):
+        super(NewUserCommand, self).__init__()
+        self.name = 'newuser'
+        self.description = ("Generates the entry for a new user so you can "
+                "copy/paste it in your `.wikirc`.")
+
+    def setupParser(self, parser):
+        parser.add_argument('username', nargs=1)
+        parser.add_argument('password', nargs='?')
+
+    def run(self, ctx):
+        username = ctx.args.username
+        password = ctx.args.password or prompt_pass('Password: ')
+        password = generate_password_hash(password)
+        logger.info("[users]")
+        logger.info("%s = %s" % (username, password))
+
--- /dev/null  Thu Jan 01 00:00:00 1970 +0000
+++ b/wikked/commands/web.py  Wed Jan 22 21:39:02 2014 -0800
@@ -0,0 +1,41 @@
+import logging
+from wikked.commands.base import WikkedCommand, register_command
+
+
+logger = logging.getLogger(__name__)
+
+
+@register_command
+class RunServerCommand(WikkedCommand):
+    def __init__(self):
+        super(RunServerCommand, self).__init__()
+        self.name = 'runserver'
+        self.description = ("Runs the wiki in a local web server.")
+
+    def setupParser(self, parser):
+        parser.add_argument('--host',
+                help="The host to use",
+                default='127.0.0.1')
+        parser.add_argument('--port',
+                help="The port to use",
+                default=5000)
+        parser.add_argument('--production',
+                help="Don't enable the debugger or reloader",
+                action='store_true')
+
+    def run(self, ctx):
+        from wikked.web import app
+
+        if bool(app.config.get('UPDATE_WIKI_ON_START')):
+            ctx.wiki.update()
+
+        use_dbg_and_rl = not ctx.args.production
+
+        app.wiki_params = ctx.params
+        app.run(
+                host=ctx.args.host,
+                port=ctx.args.port,
+                debug=app.config.get('DEBUG', True),
+                use_debugger=use_dbg_and_rl,
+                use_reloader=use_dbg_and_rl)
+
--- a/wikked/db/sql.py Sun Jan 12 01:22:20 2014 -0800 +++ b/wikked/db/sql.py Wed Jan 22 21:39:02 2014 -0800 @@ -34,11 +34,11 @@ title = Column(Text) raw_text = Column(Text) formatted_text = Column(Text) - - meta = relationship('SQLMeta', order_by='SQLMeta.id', + + meta = relationship('SQLMeta', order_by='SQLMeta.id', backref=backref('page'), cascade='all, delete, delete-orphan') - links = relationship('SQLLink', order_by='SQLLink.id', + links = relationship('SQLLink', order_by='SQLLink.id', backref=backref('source'), cascade='all, delete, delete-orphan') @@ -48,7 +48,7 @@ ready_meta = relationship('SQLReadyMeta', order_by='SQLReadyMeta.id', backref=backref('page'), cascade='all, delete, delete-orphan') - ready_links = relationship('SQLReadyLink', order_by='SQLReadyLink.id', + ready_links = relationship('SQLReadyLink', order_by='SQLReadyLink.id', backref=backref('source'), cascade='all, delete, delete-orphan') @@ -92,7 +92,7 @@ class SQLReadyLink(Base): __tablename__ = 'ready_links' - + id = Column(Integer, primary_key=True) source_id = Column(Integer, ForeignKey('pages.id')) target_url = Column(Text) @@ -116,16 +116,18 @@ """ schema_version = 3 - def __init__(self): + def __init__(self, config): Database.__init__(self) self.engine = None + self.session = None + self.engine_url = config.get('wiki', 'database_url') + self.auto_update = config.getboolean('wiki', 'auto_update') def initDb(self, wiki): self.wiki = wiki - engine_url = wiki.config.get('wiki', 'database_url') - logger.info("Using database from URL: %s" % engine_url) - self.engine = create_engine(engine_url, convert_unicode=True) + logger.info("Using database from URL: %s" % self.engine_url) + self.engine = create_engine(self.engine_url, convert_unicode=True) self.session = scoped_session(sessionmaker( autocommit=False, autoflush=False, @@ -134,7 +136,7 @@ Base.query = self.session.query_property() create_schema = False - if engine_url != 'sqlite:///:memory:': + if self.engine_url != 'sqlite:///:memory:': # The existing schema is outdated, re-create it. schema_version = self._getSchemaVersion() if schema_version < self.schema_version: @@ -212,7 +214,7 @@ all() for p in db_pages: p.is_ready = False - + self.session.commit() logger.debug("...done updating SQL database.") @@ -239,7 +241,7 @@ subdir = string.rstrip(subdir, '/') + '/%' q = q.filter(SQLPage.url.like(subdir)) for p in q.all(): - yield SQLDatabasePage(self.wiki, db_obj=p) + yield SQLDatabasePage(self, db_obj=p) def pageExists(self, url=None, path=None): # TODO: replace with an `EXIST` query. @@ -258,21 +260,21 @@ filter(SQLPage.is_ready == False).\ all() for p in q: - yield SQLDatabasePage(self.wiki, db_obj=p) + yield SQLDatabasePage(self, db_obj=p) def _getPageByUrl(self, url): q = self.session.query(SQLPage).filter_by(url=url) page = q.first() if page is None: return None - return SQLDatabasePage(self.wiki, db_obj=page) + return SQLDatabasePage(self, db_obj=page) def _getPageByPath(self, path): q = self.session.query(SQLPage).filter_by(path=path) page = q.first() if page is None: return None - return SQLDatabasePage(self.wiki, db_obj=page) + return SQLDatabasePage(self, db_obj=page) def _createSchema(self): Base.metadata.drop_all(self.engine) @@ -361,14 +363,13 @@ class SQLDatabasePage(Page): """ A page that can load its properties from a database. 
""" - def __init__(self, wiki, url=None, db_obj=None): + def __init__(self, db, url=None, db_obj=None): if url and db_obj: raise Exception("You can't specify both an url and a database object.") if not url and not db_obj: raise Exception("You need to specify either a url or a database object.") - super(SQLDatabasePage, self).__init__(wiki, url or db_obj.url) - self.auto_update = wiki.config.getboolean('wiki', 'auto_update') + super(SQLDatabasePage, self).__init__(db.wiki, url or db_obj.url) self._db_obj = db_obj @property @@ -397,7 +398,7 @@ self.wiki.db._cacheExtendedData(self) def _loadFromDbObject(self, db_obj, bypass_auto_update=False): - if not bypass_auto_update and self.auto_update: + if not bypass_auto_update and self.wiki.db.auto_update: path_time = datetime.datetime.fromtimestamp( os.path.getmtime(db_obj.path)) if path_time >= db_obj.time:
--- a/wikked/fs.py Sun Jan 12 01:22:20 2014 -0800 +++ b/wikked/fs.py Wed Jan 22 21:39:02 2014 -0800 @@ -34,12 +34,12 @@ file-system paths, and for scanning the file-system to list existing pages. """ - def __init__(self, root): + def __init__(self, root, config): self.root = unicode(root) self.excluded = None self.page_extensions = None - self.default_extension = '.txt' + self.default_extension = config.get('wiki', 'default_extension') def initFs(self, wiki): self.page_extensions = list(set( @@ -50,13 +50,12 @@ excluded += wiki.scm.getSpecialFilenames() self.excluded = [os.path.join(self.root, e) for e in excluded] - self.default_extension = wiki.config.get('wiki', 'default_extension') - def getPageInfos(self, subdir=None): basepath = self.root if subdir is not None: basepath = self.getPhysicalNamespacePath(subdir) + logger.debug("Scanning for pages in: %s" % basepath) for dirpath, dirnames, filenames in os.walk(basepath): incl_dirnames = [] for d in dirnames: @@ -74,6 +73,7 @@ yield page_info def getPageInfo(self, path): + logger.debug("Reading page info from: %s" % path) if not isinstance(path, unicode): path = unicode(path) for e in self.excluded: @@ -82,6 +82,7 @@ return self._getPageInfo(path) def getPage(self, url): + logger.debug("Searching for page: %s" % url) path = self.getPhysicalPagePath(url) return PageInfo(url, path) @@ -93,6 +94,7 @@ return PageInfo(url, path) def pageExists(self, url): + logger.debug("Searching for page: %s" % url) try: self.getPhysicalPagePath(url) return True
--- a/wikked/scm/mercurial.py  Sun Jan 12 01:22:20 2014 -0800
+++ b/wikked/scm/mercurial.py  Wed Jan 22 21:39:02 2014 -0800
@@ -27,6 +27,9 @@
             }
 
     def initRepo(self, wiki):
+        pass
+
+    def createRepo(self):
         # Make a Mercurial repo if there's none.
         if not os.path.isdir(os.path.join(self.root, '.hg')):
             logger.info("Creating Mercurial repository at: " + self.root)
--- a/wikked/web.py Sun Jan 12 01:22:20 2014 -0800 +++ b/wikked/web.py Wed Jan 22 21:39:02 2014 -0800 @@ -2,7 +2,8 @@ import os.path import logging from flask import Flask, abort, g -from utils import find_wiki_root +from wikked.wiki import Wiki + # Create the main app. static_folder = os.path.join(os.path.dirname(__file__), 'static') @@ -26,6 +27,7 @@ # config file in there. wiki_root = app.config['WIKI_ROOT'] if wiki_root is None: + from wikked.utils import find_wiki_root wiki_root = find_wiki_root() if wiki_root is None: raise Exception("Can't find the wiki root to use.") @@ -60,6 +62,8 @@ # access to the context instance for the wiki. @app.before_request def before_request(): + wiki = Wiki(app.wiki_params) + wiki.start() g.wiki = wiki @@ -69,10 +73,11 @@ # SQLAlchemy. +# TODO: this totally assumes things about the wiki's DB API. @app.teardown_appcontext def shutdown_session(exception=None): wiki = getattr(g, 'wiki', None) - if wiki: + if wiki and wiki.db.session is not None: if app.config['SQL_COMMIT_ON_TEARDOWN'] and exception is None: wiki.db.session.commit() wiki.db.session.remove() @@ -111,26 +116,13 @@ app.bcrypt = SHA512Fallback() -# Create the wiki. -from wiki import Wiki, WikiParameters - -def create_wiki(update_on_start=True): - params = WikiParameters(root=wiki_root) - wiki = Wiki(params) - wiki.start(update_on_start) - return wiki - - -wiki = create_wiki(bool(app.config.get('UPDATE_WIKI_ON_START'))) - - # Import the views. # (this creates a PyLint warning but it's OK) # pylint: disable=unused-import -import views.error -import views.read -import views.edit -import views.history -import views.special -import views.admin +import wikked.views.error +import wikked.views.read +import wikked.views.edit +import wikked.views.history +import wikked.views.special +import wikked.views.admin
--- a/wikked/wiki.py Sun Jan 12 01:22:20 2014 -0800 +++ b/wikked/wiki.py Wed Jan 22 21:39:02 2014 -0800 @@ -31,48 +31,72 @@ if root is None: root = os.getcwd() self.root = root - self.formatters = self.getFormatters() + self._config = None + self._index_factory = None + self._scm_factory = None + self._page_updater = None - def fs_factory(self, config): - return FileSystem(self.root) + @property + def config(self): + if self._config is None: + self._loadConfig() + return self._config + + def fs_factory(self): + return FileSystem(self.root, self.config) - def index_factory(self, config): - index_type = config.get('wiki', 'indexer') - if index_type == 'whoosh': - from wikked.indexer.whooshidx import WhooshWikiIndex - return WhooshWikiIndex() - elif index_type == 'elastic': - from wikked.indexer.elastic import ElasticWikiIndex - return ElasticWikiIndex() - else: - raise InitializationError("No such indexer: " + index_type) - - def db_factory(self, config): - from wikked.db.sql import SQLDatabase - return SQLDatabase() + def index_factory(self): + if self._index_factory is None: + index_type = self.config.get('wiki', 'indexer') + if index_type == 'whoosh': + def impl(): + from wikked.indexer.whooshidx import WhooshWikiIndex + return WhooshWikiIndex() + self._index_factory = impl + elif index_type == 'elastic': + def impl(): + from wikked.indexer.elastic import ElasticWikiIndex + return ElasticWikiIndex() + self._index_factory = impl + else: + raise InitializationError("No such indexer: " + index_type) + return self._index_factory() - def scm_factory(self, config): - try: - scm_type = config.get('wiki', 'sourcecontrol') - except NoOptionError: - # Auto-detect - if os.path.isdir(os.path.join(self.root, '.hg')): - scm_type = 'hg' - elif os.path.isdir(os.path.join(self.root, '.git')): - scm_type = 'git' + def db_factory(self): + from wikked.db.sql import SQLDatabase + return SQLDatabase(self.config) + + def scm_factory(self): + if self._scm_factory is None: + try: + scm_type = self.config.get('wiki', 'sourcecontrol') + except NoOptionError: + # Auto-detect + if os.path.isdir(os.path.join(self.root, '.hg')): + scm_type = 'hg' + elif os.path.isdir(os.path.join(self.root, '.git')): + scm_type = 'git' + else: + # Default to Mercurial. Yes. I just decided that myself. + scm_type = 'hg' + + if scm_type == 'hg': + def impl(): + from wikked.scm.mercurial import MercurialCommandServerSourceControl + return MercurialCommandServerSourceControl(self.root) + self._scm_factory = impl + elif scm_type == 'git': + def impl(): + from wikked.scm.git import GitLibSourceControl + return GitLibSourceControl(self.root) + self._scm_factory = impl else: - # Default to Mercurial. Yes. I just decided that myself. 
- scm_type = 'hg' + raise InitializationError("No such source control: " + scm_type) + return self._scm_factory() - if scm_type == 'hg': - from wikked.scm.mercurial import MercurialCommandServerSourceControl - return MercurialCommandServerSourceControl(self.root) - elif scm_type == 'git': - from wikked.scm.git import GitLibSourceControl - return GitLibSourceControl(self.root) - else: - raise InitializationError("No such source control: " + scm_type) + def auth_factory(self): + return UserManager(self.config) def getFormatters(self): formatters = {passthrough_formatter: ['txt', 'html']} @@ -81,6 +105,24 @@ self.tryAddFormatter(formatters, 'creole', 'creole2html', ['cr', 'creole']) return formatters + def getSpecialFilenames(self): + yield '.wikirc' + yield '.wiki' + if self.config.has_section('ignore'): + for name, val in self.config.items('ignore'): + yield val + + def getPageUpdater(self): + if self._page_updater is None: + if self.config.getboolean('wiki', 'async_updates'): + logger.debug("Setting up asynchronous updater.") + from tasks import update_wiki + self._page_updater = lambda url: update_wiki.delay(self.root) + else: + logger.debug("Setting up simple updater.") + self._page_updater = lambda url: self.update(url, cache_ext_data=False) + return self._page_updater + def tryAddFormatter(self, formatters, module_name, module_func, extensions): try: module = importlib.import_module(module_name) @@ -89,6 +131,20 @@ except ImportError: pass + def _loadConfig(self): + # Merge the default settings with any settings provided by + # the local config file(s). + config_path = os.path.join(self.root, '.wikirc') + local_config_path = os.path.join(self.root, '.wiki', 'wikirc') + default_config_path = os.path.join( + os.path.dirname(__file__), 'resources', 'defaults.cfg') + + config = SafeConfigParser() + config.readfp(open(default_config_path)) + config.set('wiki', 'root', self.root) + config.read([config_path, local_config_path]) + self._config = config + class Wiki(object): """ The wiki class! This is where the magic happens. 
@@ -104,34 +160,25 @@ logger.debug("Initializing wiki.") + self.formatters = parameters.formatters + self.special_filenames = parameters.getSpecialFilenames() - self.parameters = parameters - self.config = self._loadConfig(parameters) - self.main_page_url = '/' + self.config.get('wiki', 'main_page').strip('/') - self.templates_url = '/' + self.config.get('wiki', 'templates_dir').strip('/') + '/' - - self.formatters = parameters.formatters + self.main_page_url = '/' + parameters.config.get('wiki', 'main_page').strip('/') + self.templates_url = '/' + parameters.config.get('wiki', 'templates_dir').strip('/') + '/' - self.fs = parameters.fs_factory(self.config) - self.index = parameters.index_factory(self.config) - self.db = parameters.db_factory(self.config) - self.scm = parameters.scm_factory(self.config) - - self.auth = UserManager(self.config) + self.fs = parameters.fs_factory() + self.index = parameters.index_factory() + self.db = parameters.db_factory() + self.scm = parameters.scm_factory() + self.auth = parameters.auth_factory() - if self.config.getboolean('wiki', 'async_updates'): - logger.debug("Setting up asynchronous updater.") - from tasks import update_wiki - self._updateSetPage = lambda url: update_wiki.delay(self.root) - else: - logger.debug("Setting up simple updater.") - self._updateSetPage = lambda url: self.update(url, cache_ext_data=False) + self._updateSetPage = parameters.getPageUpdater() @property def root(self): return self.fs.root - def start(self, update=True): + def start(self, update=False): """ Properly initializes the wiki and all its sub-systems. """ self.fs.initFs(self) @@ -262,11 +309,7 @@ return self.scm.getHistory(limit=limit) def getSpecialFilenames(self): - yield '.wikirc' - yield '.wiki' - if self.config.has_section('ignore'): - for name, val in self.config.items('ignore'): - yield val + return self.special_filenames def _cachePages(self, only_urls=None): logger.debug("Caching extended page data...") @@ -278,20 +321,6 @@ for page in self.db.getUncachedPages(): page._ensureExtendedData() - def _loadConfig(self, parameters): - # Merge the default settings with any settings provided by - # the parameters. - config_path = os.path.join(parameters.root, '.wikirc') - local_config_path = os.path.join(parameters.root, '.wiki', 'wikirc') - default_config_path = os.path.join( - os.path.dirname(__file__), 'resources', 'defaults.cfg') - - config = SafeConfigParser() - config.readfp(open(default_config_path)) - config.set('wiki', 'root', parameters.root) - config.read([config_path, local_config_path]) - return config - def reloader_stat_loop(wiki, interval=1): mtimes = {}
--- /dev/null  Thu Jan 01 00:00:00 1970 +0000
+++ b/wikked/witch.py  Wed Jan 22 21:39:02 2014 -0800
@@ -0,0 +1,67 @@
+import logging
+import argparse
+from wikked.commands.base import command_classes
+from wikked.utils import find_wiki_root
+from wikked.wiki import Wiki, WikiParameters
+
+
+logger = logging.getLogger(__name__)
+
+
+class WitchContext(object):
+    def __init__(self, params, wiki, args):
+        self.params = params
+        self.wiki = wiki
+        self.args = args
+
+
+def main():
+    # Setup the parser.
+    parser = argparse.ArgumentParser(
+            description="Wikked command line utility")
+    parser.add_argument('--root',
+            help="Use the specified root directory instead of the current one")
+    parser.add_argument('--debug',
+            help="Show debug information",
+            action='store_true')
+    parser.add_argument('--quiet',
+            help="Print only important information.",
+            action='store_true')
+    parser.add_argument('--log',
+            help="Send log messages to the specified file.")
+
+    # Setup the command parsers.
+    subparsers = parser.add_subparsers()
+    commands = map(lambda cls: cls(), command_classes)
+    logger.debug("Got %d commands." % len(commands))
+    for c in commands:
+        cp = subparsers.add_parser(c.name, help=c.description)
+        c.setupParser(cp)
+        cp.set_defaults(func=c._doRun)
+
+    # Parse!
+    result = parser.parse_args()
+
+    # Setup logging.
+    root_logger = logging.getLogger()
+    if result.debug and result.quiet:
+        raise Exception("You can't specify both --debug and --quiet.")
+    if result.quiet:
+        root_logger.setLevel(logging.WARNING)
+    elif result.debug:
+        root_logger.setLevel(logging.DEBUG)
+    if result.log:
+        from logging.handlers import FileHandler
+        root_logger.addHandler(FileHandler(result.log))
+
+    # Create the wiki.
+    root = find_wiki_root(result.root)
+    params = WikiParameters(root)
+    wiki = Wiki(params)
+    wiki.start()
+
+    # Run the command!
+    ctx = WitchContext(params, wiki, result)
+    exit_code = result.func(ctx)
+    return exit_code
+
--- a/wk.py Sun Jan 12 01:22:20 2014 -0800 +++ b/wk.py Wed Jan 22 21:39:02 2014 -0800 @@ -1,115 +1,13 @@ #!/usr/local/bin/python - -# Configure logging. import logging -logging.basicConfig(level=logging.DEBUG) - -# Configure a simpler log format. -from wikked import settings -settings.LOG_FORMAT = "[%(levelname)s]: %(message)s" -settings.UPDATE_WIKI_ON_START = False - -# Create the app and the wiki. -from wikked.web import app, wiki - -# Create the manager. -from flask.ext.script import Manager, prompt, prompt_pass -manager = Manager(app) - - -@manager.command -def users(): - """Lists users of this wiki.""" - print "Users:" - for user in wiki.auth.getUsers(): - print " - " + user.username - print "" - - -@manager.command -def user(username=None, password=None): - """Generates the entry for a new user so you can - copy/paste it in your `.wikirc`. - """ - username = username or prompt('Username: ') - password = password or prompt_pass('Password: ') - password = app.bcrypt.generate_password_hash(password) - print "[users]" - print "%s = %s" % (username, password) - - -@manager.command -def reset(cache=False, index_only=False): - """ Re-generates the database and the full-text-search index. - """ - if index_only: - wiki.index.reset(wiki.getPages()) - else: - wiki.reset(cache_ext_data=cache) +from wikked.witch import main -@manager.command -def update(url=None, cache=False): - """ Updates the database and the full-text-search index with any - changed/new files. - """ - wiki.update(url, cache_ext_data=cache) - - -@manager.command -def cache(): - """ Makes sure the extended cache is valid for the whole wiki. - """ - wiki._cachePages() - - -@manager.command -def list(fs=False): - """ Lists page names in the wiki. - """ - if fs: - for pi in wiki.fs.getPageInfos(): - print pi.url - else: - for url in wiki.db.getPageUrls(): - print url - - -@manager.command -def get(url, force_resolve=False, rev=None): - """ Gets a page that matches the given URL. - """ - page = wiki.getPage(url) - if force_resolve: - page._force_resolve = True - if rev is not None: - print page.getRevision(rev) - return - print page.text - - -@manager.command -def search(query): - """ Searches the wiki. - """ - hits = wiki.index.search(query) - print hits - - -@manager.command -def linksfrom(url): - page = wiki.getPage(url) - for l in page.links: - print l - - -@manager.command -def linksto(url): - page = wiki.getPage(url) - for l in page.getIncomingLinks(): - print l +# Configure logging. +logging.basicConfig(level=logging.DEBUG, + format="[%(levelname)s]: %(message)s") if __name__ == "__main__": - manager.run() + main()
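For reference, the refactored command-line utility is now driven entirely by `argparse` sub-commands; a few typical invocations are shown below (the page URL and port are only examples):

python wk.py reset --cache
python wk.py list
python wk.py linksto /main-page
python wk.py runserver --port 8080 --production

Global flags such as `--root`, `--debug`, `--quiet` and `--log` are handled by the top-level parser in `wikked/witch.py`.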