view MoinMoin/groups/backends/ @ 4795:64ff4ea0b597

Groups2009: wiki_group backend changes. backend_type and backend_name were removed both for Group and Backend. Group.__init__ takes backed as a parameter. is used instead of Group.group_name. Group.__repr__ prints its class.
author Dmitrijs Milajevs <>
date Tue, 09 Jun 2009 23:34:54 +0200
parents d9151c006aab
children 7dd5d15de911
line wrap: on
line source
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - wiki group backend

The wiki group backend enables you to define groups on wiki pages.  To
find group pages, request.cfg.cache.page_group_regexact pattern is
used.  To find group members, it parses these pages and extracts the
first level list (wiki markup).

@copyright: 2008 MoinMoin:ThomasWaldmann,
            2008 MoinMoin:MelitaMihaljevic,
            2009 MoinMoin:DmitrijsMilajevs
@license: GPL, see COPYING for details
"""

import re

from MoinMoin import caching, wikiutil
from MoinMoin.Page import Page

class Group(object):
    """A group defined by a wiki page.

    On creation the group page is loaded (or read from the 'pagegroups'
    cache) and its first level list items are split into two sets:

    * members -- plain member names
    * member_groups -- names of nested group pages
    """

    # * Member - ignore all but first level list items, strip
    # whitespace, strip free links markup. This is used for parsing
    # pages in order to find group page members
    group_page_parse_re = re.compile(r'^ \* +(?:\[\[)?(?P<member>.+?)(?:\]\])? *$', re.MULTILINE | re.UNICODE)

    def __init__(self, request, name, backend):
        """
        Initialize a wiki group.

        @param request: request object
        @param name: group name (== group page name)
        @param backend: backend object which created this group
        """
        self.request = request
        self.name = name
        self.backend = backend

        self._load_group()

    def _load_group(self):
        """
        Fill self.members and self.member_groups from the group page.

        The parsed member lists are cached in the 'pagegroups' cache
        arena; the cache is used when it is newer than the page,
        otherwise the page is re-parsed and the cache refreshed.

        @raise KeyError: if the group page does not exist
        """
        request = self.request
        group_name = self.name

        page = Page(request, group_name)
        if page.exists():
            arena = 'pagegroups'
            key = wikiutil.quoteWikinameFS(group_name)
            cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
            try:
                cache_mtime = cache.mtime()
                page_mtime = wikiutil.version2timestamp(page.mtime_usecs())
                # TODO: fix up-to-date check mtime granularity problems
                if cache_mtime > page_mtime:
                    # cache is uptodate
                    self.members, self.member_groups = cache.content()
                else:
                    # cache is outdated, fall through to the recreate path below
                    raise caching.CacheError
            except caching.CacheError:
                # either cache does not exist, is erroneous or not uptodate: recreate it
                text = page.get_raw_body()
                self.members, self.member_groups = self._parse_page(text)
                cache.update((self.members, self.member_groups))
        else:
            raise KeyError("There is no such group page %s" % group_name)

    def _parse_page(self, text):
        """
        Parse <text> and return members and groups defined in the <text>.

        @param text: raw body of the group page [unicode]
        @return: (members, member_groups) tuple of sets
        """
        text_members = (match.group('member') for match in self.group_page_parse_re.finditer(text))
        members_final = set()
        member_groups = set()

        for member in text_members:
            if self.request.cfg.cache.page_group_regexact.match(member):
                # the list item names another group page -> nested group
                member_groups.add(member)
            else:
                members_final.add(member)

        return members_final, member_groups

    def _contains(self, member, processed_groups):
        """
        First check if <member> is part of this group and then check
        for every subgroup in this group.

        <processed_groups> is needed to avoid infinite recursion, if
        groups are defined recursively.

        @param member: member name [unicode]
        @param processed_groups: groups which were checked for containment before [set]
        """
        processed_groups.add(self.name)

        if member in self.members:
            return True
        else:
            groups = self.request.groups
            for group_name in self.member_groups:
                if group_name not in processed_groups and groups[group_name]._contains(member, processed_groups):
                    return True

        return False

    def __contains__(self, member):
        """
        Check if <member> is defined in this group. Checks also for subgroups.
        """
        return self._contains(member, set())

    def _iter(self, yielded_members, processed_groups):
        """
        Iterate first over members of this group, then over subgroups of this group.

        <yielded_members> and <processed_groups> are needed to avoid infinite recursion.
        This can happen if there are two groups like these:
           OneGroup: Something, OtherGroup
           OtherGroup: OneGroup, SomethingOther

        @param yielded_members: members which have been already yielded before [set]
        @param processed_groups: group names which have been iterated before [set]
        """
        processed_groups.add(self.name)

        for member in self.members:
            if member not in yielded_members:
                yielded_members.add(member)
                yield member

        groups = self.request.groups
        for group_name in self.member_groups:
            if group_name not in processed_groups:
                for member in groups[group_name]._iter(yielded_members, processed_groups):
                    if member not in yielded_members:
                        yielded_members.add(member)
                        yield member

    def __iter__(self):
        """
        Iterate over members of this group. Iterates also over subgroups if any.
        """
        return self._iter(set(), set())

    def __repr__(self):
        return "<%s group_name=%s members=%s member_groups=%s>" % (self.__class__,
                                                                   self.name,
                                                                   self.members,
                                                                   self.member_groups)

class Backend(object):
    """Wiki group backend: looks up groups defined on wiki pages.

    Group pages are recognized by matching their names against
    request.cfg.cache.page_group_regexact.
    """

    def __init__(self, request):
        """
        Create a group manager backend object.

        @param request: request object
        """
        self.request = request
        # pattern a page name must match (exactly) to count as a group page
        self.page_group_regex = request.cfg.cache.page_group_regexact

    def __contains__(self, group_name):
        """
        Check if there is a group page <group_name>. <group_name> must satisfy page_group_regex.
        """
        # short-circuits: the page is only looked up for matching names
        return self.page_group_regex.match(group_name) and Page(self.request, group_name).exists()

    def __iter__(self):
        """
        Iterate over group names of groups available in the wiki.
        """
        grouppages = self.request.rootpage.getPageList(user='', filter=self.page_group_regex.search)
        return iter(grouppages)

    def __getitem__(self, group_name):
        """
        Return wiki group backend object.
        """
        return Group(request=self.request, name=group_name, backend=self)