changeset 4825:8db07c73e972

Groups2009: The datastruct.dicts module is refactored to mirror the groups module: dict contents are now loaded lazily per dict and cached per page, so the events.wikidictsrescan handler is removed. (A usage sketch follows the diffstat below.)
author Dmitrijs Milajevs <dimazest@gmail.com>
date Fri, 26 Jun 2009 20:06:12 +0200
parents e5ef9d9f9eaa
children 9bee7b3e9235
files MoinMoin/_tests/wikiconfig_groups.py MoinMoin/datastruct/dicts/backends/__init__.py MoinMoin/datastruct/dicts/backends/wiki_dicts.py MoinMoin/events/wikidictsrescan.py
diffstat 4 files changed, 118 insertions(+), 178 deletions(-)
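For orientation, a rough sketch of how the refactored, groups-style dict API is meant to be driven. This is not part of the changeset; the u'SomeDict' page name and the lookup_example helper are invented for illustration:

from MoinMoin.datastruct.dicts.backends.wiki_dicts import WikiDicts

def lookup_example(request):
    # Constructing the backend is cheap: no page is read until a dict is accessed.
    dicts = WikiDicts(request)
    if u'SomeDict' in dicts:                # BaseDictBackend.__contains__
        some_dict = dicts[u'SomeDict']      # builds a WikiDict, loading and caching the page
        return some_dict.get(u'somekey', u'fallback')
    return None
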
--- a/MoinMoin/_tests/wikiconfig_groups.py	Fri Jun 26 18:13:35 2009 +0200
+++ b/MoinMoin/_tests/wikiconfig_groups.py	Fri Jun 26 20:06:12 2009 +0200
@@ -12,8 +12,5 @@
 
 class Config(LocalConfig):
     group_manager_init = lambda self, request: WikiGroups(request)
+    dict_manager_init = lambda self, request: WikiDicts(request)
 
-    def dict_manager_init(self, request):
-        dicts = WikiDicts(request)
-        dicts.load_dicts()
-        return dicts
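The explicit load_dicts() call is gone because dict contents are now read lazily when a WikiDict is constructed, so the configured factory can stay a plain lambda. A sketch of how such a factory is expected to be invoked at request time, following the pattern visible in the removed event handler at the end of this changeset (the attach_dicts helper itself is hypothetical):

def attach_dicts(request):
    # With the lambda above this amounts to WikiDicts(request); there is no
    # eager load_dicts()/scan_dicts() pass over the whole wiki any more.
    request.dicts = request.cfg.dict_manager_init(request)
    return request.dicts
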
--- a/MoinMoin/datastruct/dicts/backends/__init__.py	Fri Jun 26 18:13:35 2009 +0200
+++ b/MoinMoin/datastruct/dicts/backends/__init__.py	Fri Jun 26 20:06:12 2009 +0200
@@ -0,0 +1,74 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - MoinMoin.datastruct.dicts.backends.
+
+    @copyright: 2009 by MoinMoin:DmitrijsMilajevs
+    @license: GNU GPL, see COPYING for details.
+"""
+
+class DictDoesNotExistError(Exception):
+    """
+    Raised when a dict name is not found in the backend.
+    """
+
+class BaseDict(object):
+    def __init__(self, request, name, backend):
+        """
+        Initialize a dict.
+
+        @param request: the current request object
+        @param name: moin dict name
+        @param backend: the backend object which created this object
+
+        """
+        self.request = request
+        self.name = name
+        self._backend = backend
+        self._dict = self._load_dict()
+
+    def __iter__(self):
+        return self._dict.__iter__()
+
+    def __len__(self):
+        return self._dict.__len__()
+
+    def __getitem__(self, *kargs):
+        return self._dict.__getitem__(*kargs)
+
+    def get(self, *kargs):
+        return self._dict.get(*kargs)
+
+    def _load_dict(self):
+        """
+        Retrieve dict data from the backend.
+        """
+        return self._backend._retrieve_members(self.name)
+
+    def __repr__(self):
+        return "<%r name=%r items=%r>" % (self.__class__, self.name, self._dict.items())
+
+
+class BaseDictBackend(object):
+
+    def __init__(self, request):
+        self.request = request
+        self.page_dict_regex = request.cfg.cache.page_dict_regexact
+
+    def is_dict_name(self, name):
+        return self.page_dict_regex.match(name)
+
+    def __contains__(self, dict_name):
+        """
+        Check if a dict called <dict_name> is available in this backend.
+        """
+        raise NotImplementedError()
+
+    def __getitem__(self, dict_name):
+        """
+        Get a dict by its moin dict name.
+        """
+        raise NotImplementedError()
+
+    def _retrieve_members(self, dict_name):
+        raise NotImplementedError()
+
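To illustrate the contract these base classes define, a minimal in-memory backend might look roughly like the following. ConfigDict, ConfigDicts and the mapping passed to the constructor are hypothetical; only the three methods a concrete backend has to provide are taken from the file above:

from MoinMoin.datastruct.dicts.backends import (BaseDict, BaseDictBackend,
                                                DictDoesNotExistError)

class ConfigDict(BaseDict):
    """A dict whose members come straight from the backend's plain mapping."""

class ConfigDicts(BaseDictBackend):

    def __init__(self, request, dicts):
        super(ConfigDicts, self).__init__(request)
        self._dicts = dicts  # e.g. {u'SomeDict': {u'key': u'value'}}

    def __contains__(self, dict_name):
        return self.is_dict_name(dict_name) and dict_name in self._dicts

    def __getitem__(self, dict_name):
        return ConfigDict(request=self.request, name=dict_name, backend=self)

    def _retrieve_members(self, dict_name):
        try:
            return self._dicts[dict_name]
        except KeyError:
            raise DictDoesNotExistError(dict_name)
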
--- a/MoinMoin/datastruct/dicts/backends/wiki_dicts.py	Fri Jun 26 18:13:35 2009 +0200
+++ b/MoinMoin/datastruct/dicts/backends/wiki_dicts.py	Fri Jun 26 20:06:12 2009 +0200
@@ -7,16 +7,14 @@
                 2009 MoinMoin:DmitrijsMilajevs
     @license: GNU GPL, see COPYING for details.
 """
-import re, time
+import re
 
-from MoinMoin import caching, Page
-
-# Version of the internal data structure which is pickled.
-# Please increment if you have changed the structure.
-DICTS_PICKLE_VERSION = 7
+from MoinMoin import caching, wikiutil
+from MoinMoin.Page import Page
+from MoinMoin.datastruct.dicts.backends import BaseDict, BaseDictBackend, DictDoesNotExistError
 
 
-class WikiDict(dict):
+class WikiDict(BaseDict):
     """ Mapping of keys to values in a wiki page.
 
        How a Dict definition page should look like:
@@ -29,31 +27,36 @@
         keyn:: ....
        any text ignored
     """
-    # Key:: Value - ignore all but key:: value pairs, strip whitespace, exactly one space after the :: is required
-    regex = re.compile(ur'^ (?P<key>.+?):: (?P<val>.*?) *$', re.MULTILINE | re.UNICODE)
-
-    def __init__(self, request=None, pagename=None):
-        dict.__init__(self)
-        self.name = None
-        if request is not None and pagename is not None:
-            self._loadFromPage(request, pagename)
-
-    def _loadFromPage(self, request, name):
-        """ load the dict from wiki page <name>'s content """
-        self.name = name
-        text = Page.Page(request, name).get_raw_body()
-        self._initFromText(text)
-
-    def _initFromText(self, text):
-        for match in self.regex.finditer(text):
-            key, val = match.groups()
-            self[key] = val
-
-    def __repr__(self):
-        return "<Dict name=%r items=%r>" % (self.name, self.items())
 
 
-class WikiDicts:
+    def _load_dict(self):
+        request = self.request
+        dict_name = self.name
+
+        page = Page(request, dict_name)
+        if page.exists():
+            arena = 'pagedicts'
+            key = wikiutil.quoteWikinameFS(dict_name)
+            cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
+            try:
+                cache_mtime = cache.mtime()
+                page_mtime = wikiutil.version2timestamp(page.mtime_usecs())
+                # TODO: fix up-to-date check mtime granularity problems
+                if cache_mtime > page_mtime:
+                    # cache is up to date
+                    return cache.content()
+                else:
+                    raise caching.CacheError
+            except caching.CacheError:
+                # either the cache does not exist, is corrupt, or is stale: recreate it
+                d = super(WikiDict, self)._load_dict()
+                cache.update(d)
+                return d
+        else:
+            raise DictDoesNotExistError(dict_name)
+
+
+class WikiDicts(BaseDictBackend):
     """ a dictionary of Dict objects
 
        Config:
@@ -61,112 +64,17 @@
                Default: ".*Dict$"  Defs$ Vars$ ???????????????????
     """
 
-    def __init__(self, request):
-        self.cfg = request.cfg
-        self.request = request
-
-    def reset(self):
-        self.dictdict = {}
-        self.namespace_timestamp = 0
-        self.pageupdate_timestamp = 0
-        self.base_timestamp = 0
-        self.picklever = DICTS_PICKLE_VERSION
-        self.disk_cache_id = None
-
-    def values(self, dictname):
-        """ get values of dict <dictname> """
-        try:
-            d = self.dictdict[dictname]
-        except KeyError:
-            return []
-        return d.values()
-
-    def __getitem__(self, dictname):
-        try:
-            d = self.dictdict[dictname]
-        except KeyError:
-            return {}
-        return d
-
-    def _adddict(self, request, dictname):
-        """ add a new dict (will be read from the wiki page) """
-        self.dictdict[dictname] = WikiDict(request, dictname)
-
-    def __contains__(self, dictname):
-        return self.dictdict.has_key(dictname)
+    # Key:: Value - ignore all but key:: value pairs, strip whitespace, exactly one space after the :: is required
+    _dict_page_parse_regex = re.compile(ur'^ (?P<key>.+?):: (?P<val>.*?) *$', re.MULTILINE | re.UNICODE)
 
-    def load_dicts(self):
-        """ load the dict from the cache """
-        request = self.request
-        rescan = False
-        arena = 'wikidicts'
-        key = 'dicts'
-        cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
-        current_disk_cache_id = cache.uid()
-        try:
-            self.__dict__.update(self.cfg.cache.DICTS_DATA)
-            if (current_disk_cache_id is None or
-                current_disk_cache_id != self.disk_cache_id):
-                self.reset()
-                raise AttributeError # not fresh, force load from disk
-            else:
-                return
-        except AttributeError:
-            try:
-                data = cache.content()
-                self.__dict__.update(data)
-                self.disk_cache_id = current_disk_cache_id
-
-                # invalidate the cache if the pickle version changed
-                if self.picklever != DICTS_PICKLE_VERSION:
-                    raise # force rescan
-            except:
-                self.reset()
-                rescan = True
-
-        if rescan:
-            self.scan_dicts()
-            self.load_dicts() # try again
-            return
+    def __contains__(self, dict_name):
+        return self.is_dict_name(dict_name) and Page(self.request, dict_name).exists()
 
-        data = {
-            "disk_cache_id": self.disk_cache_id,
-            "dictdict": self.dictdict,
-            "picklever": self.picklever
-        }
-
-        # remember it (persistent environments)
-        self.cfg.cache.DICTS_DATA = data
-
-    def scan_dicts(self):
-        """ scan all pages matching the dict regex and cache the
-            results on disk
-        """
-        request = self.request
-        self.reset()
-
-        # XXX get cache write lock here
-        scan_begin_time = time.time()
+    def __getitem__(self, dict_name):
+        return WikiDict(request=self.request, name=dict_name, backend=self)
 
-        # Get all pages in the wiki - without user filtering using filter
-        # function - this makes the page list about 10 times faster.
-        isdict = self.cfg.cache.page_dict_regexact.search
-        dictpages = request.rootpage.getPageList(user='', filter=isdict)
-        for pagename in dictpages:
-            self._adddict(request, pagename)
-
-        scan_end_time = time.time()
+    def _retrieve_members(self, dict_name):
+        page = Page(self.request, dict_name)
+        text = page.get_raw_body()
+        return dict([match.groups() for match in self._dict_page_parse_regex.finditer(text)])
 
-        arena = 'wikidicts'
-        key = 'dicts'
-        cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
-        data = {
-            "scan_begin_time": scan_begin_time,
-            "scan_end_time": scan_end_time,
-            "dictdict": self.dictdict,
-            "picklever": self.picklever
-        }
-        cache.update(data)
-        # XXX release cache write lock here
-
-
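To make the "key:: value" parsing rule concrete, here is what _retrieve_members produces for an invented dict page body (the sample text and the expected result are illustration only, not part of the changeset):

import re

# Same pattern as WikiDicts._dict_page_parse_regex above.
_dict_page_parse_regex = re.compile(ur'^ (?P<key>.+?):: (?P<val>.*?) *$',
                                    re.MULTILINE | re.UNICODE)

text = u"""Any introductory text is ignored.
 First:: some value
 Second:: another value
No leading space:: this line is skipped as well
"""

members = dict([match.groups()
                for match in _dict_page_parse_regex.finditer(text)])
# members == {u'First': u'some value', u'Second': u'another value'}
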
--- a/MoinMoin/events/wikidictsrescan.py	Fri Jun 26 18:13:35 2009 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-# -*- coding: iso-8859-1 -*-
-"""
-    MoinMoin - wikidicts notification plugin for event system
-
-    When a Group or Dict page changes, we rescan them and recreate the cache.
-
-    @copyright: 2007 by MoinMoin:ThomasWaldmann
-    @license: GNU GPL, see COPYING for details.
-"""
-
-from MoinMoin import log
-logging = log.getLogger(__name__)
-
-from MoinMoin import events as ev
-
-def handle(event):
-    # "changed" includes creation, deletion, renamed and copied
-    if (isinstance(event, ev.PageChangedEvent) or isinstance(event, ev.PageRenamedEvent) or
-        isinstance(event, ev.PageCopiedEvent) or isinstance(event, ev.TrivialPageChangedEvent)):
-        cfg = event.request.cfg
-        pagename = event.page.page_name
-        if cfg.cache.page_dict_regexact.search(pagename) or \
-           cfg.cache.page_group_regexact.search(pagename):
-            return handle_groupsdicts_changed(event)
-
-
-def handle_groupsdicts_changed(event):
-    """ Handles events related to groups and dicts page changes:
-        Scans all pages matching the dict / group regex and pickles the
-        data to disk.
-    """
-    request = event.request
-    page = event.page
-
-    logging.debug("groupsdicts changed: %r, scan_dicts started", page.page_name)
-    request.dicts = request.cfg.dict_manager_init(request)
-    request.dicts.scan_dicts()
-    logging.debug("groupsdicts changed: scan_dicts finished")
-