view MoinMoin/ @ 4792:7a826f946da3

Groups2009: wikidicts were refactored. request.dict provides access only to WikiDicts. DictBase class was merged with Dict. Group class was removed. DictDict was merged with GroupDict removing methods related to the group functionality. The cache key for dicts changed from 'dicts_groups' to 'dicts'. wikidicts test was refined to capture new functionality. Changes in the other code to use DictDict.__contains__ instead of has_dict and DictDict.__getitem__ in place of dict.
author Dmitrijs Milajevs <>
date Sun, 07 Jun 2009 16:04:05 +0200
parents fef3c5039997
line wrap: on
line source
# -*- coding: iso-8859-1 -*-
"""
    MoinMoin - WikiDict functions.

    @copyright: 2003-2007 MoinMoin:ThomasWaldmann,
                2003 by Gustavo Niemeyer
                2009 MoinMoin:DmitrijsMilajevs
    @license: GNU GPL, see COPYING for details.
"""
import re, time

from MoinMoin import caching, Page

# Version of the internal data structure which is pickled.
# Please increment if you have changed the structure.
# NOTE(review): value restored from the pre-refactoring module — confirm
# against the repository history before release.
DICTS_PICKLE_VERSION = 7

class Dict(dict):
    """ Mapping of keys to values in a wiki page.

       How a Dict definition page should look like:

       any text ignored
        key1:: value1
        * ignored, too
        key2:: value2 containing spaces
        keyn:: ....
       any text ignored
    # Key:: Value - ignore all but key:: value pairs, strip whitespace, exactly one space after the :: is required
    regex = re.compile(ur'^ (?P<key>.+?):: (?P<val>.*?) *$', re.MULTILINE | re.UNICODE)

    def __init__(self, request=None, pagename=None):
        dict.__init__(self) = None
        if request is not None and pagename is not None:
            self._loadFromPage(request, pagename)

    def _loadFromPage(self, request, name):
        """ load the dict from wiki page <name>'s content """ = name
        text = Page.Page(request, name).get_raw_body()

    def _initFromText(self, text):
        for match in self.regex.finditer(text):
            key, val = match.groups()
            self[key] = val

    def __repr__(self):
        return "<Dict name=%r items=%r>" % (, self.items())

class DictDict:
    """ a dictionary of Dict objects

               Default: ".*Dict$"  Defs$ Vars$ ???????????????????

    def __init__(self, request):
        self.cfg = request.cfg
        self.request = request

    def reset(self):
        """ Drop all cached dicts and reinitialize the bookkeeping state. """
        self.dictdict = {}
        self.disk_cache_id = None
        # timestamps of the last scans (0 == never scanned)
        self.namespace_timestamp = 0
        self.pageupdate_timestamp = 0
        self.base_timestamp = 0
        self.picklever = DICTS_PICKLE_VERSION

    def values(self, dictname):
        """ get values of dict <dictname> """
            d = self.dictdict[dictname]
        except KeyError:
            return []
        return d.values()

    def __getitem__(self, dictname):
            d = self.dictdict[dictname]
        except KeyError:
            return {}
        return d

    def _adddict(self, request, dictname):
        """ add a new dict (will be read from the wiki page) """
        page_dict = Dict(request, dictname)
        self.dictdict[dictname] = page_dict

    def __contains__(self, dictname):
        return self.dictdict.has_key(dictname)

    def load_dicts(self):
        """ Load the dicts, cheapest source first.

            Order of attempts:
            1. the per-process copy on cfg.cache (persistent environments);
            2. the wiki-wide pickled disk cache, when the in-process copy
               is missing or its disk_cache_id is stale;
            3. a full page rescan (scan_dicts) when the disk cache is
               unusable or its pickle layout version changed, followed by
               one retry of this method.
        """
        request = self.request
        rescan = False
        arena = 'wikidicts'
        key = 'dicts'
        cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
        current_disk_cache_id = cache.uid()
        try:
            # fast path: adopt the copy kept in this process; raises
            # AttributeError when nothing was stored yet
            self.__dict__.update(self.cfg.cache.DICTS_DATA)
            if (current_disk_cache_id is None or
                current_disk_cache_id != self.disk_cache_id):
                self.reset()
                raise AttributeError # not fresh, force load from disk
        except AttributeError:
            try:
                data = cache.content()
                self.__dict__.update(data)
                self.disk_cache_id = current_disk_cache_id

                # invalidate the cache if the pickle version changed
                if self.picklever != DICTS_PICKLE_VERSION:
                    raise # force rescan (re-raises the AttributeError)
            except Exception:
                # disk cache missing/corrupt/outdated: rebuild from pages
                self.reset()
                rescan = True

        if rescan:
            self.scan_dicts()
            self.load_dicts() # try again
            return

        data = {
            "disk_cache_id": self.disk_cache_id,
            "dictdict": self.dictdict,
            "picklever": self.picklever,
        }

        # remember it (persistent environments)
        self.cfg.cache.DICTS_DATA = data

    def scan_dicts(self):
        """ scan all pages matching the dict regex and cache the
            results on disk
        """
        request = self.request

        # XXX get cache write lock here
        scan_begin_time = time.time()

        # Get all pages in the wiki - without user filtering using filter
        # function - this makes the page list about 10 times faster.
        # NOTE(review): page_dict_regexact is the precompiled exact-match
        # form of cfg.page_dict_regex — confirm the attribute name against
        # multiconfig.py.
        isdict = self.cfg.cache.page_dict_regexact.search
        dictpages = request.rootpage.getPageList(user='', filter=isdict)
        for pagename in dictpages:
            self._adddict(request, pagename)

        scan_end_time = time.time()

        arena = 'wikidicts'
        key = 'dicts'
        cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
        data = {
            "scan_begin_time": scan_begin_time,
            "scan_end_time": scan_end_time,
            "dictdict": self.dictdict,
            "picklever": self.picklever,
        }
        cache.update(data)
        # XXX release cache write lock here