changeset 1230:039e25408bac

merge with main
author Franz Pletz <fpletz AT franz-pletz DOT org>
date Mon, 07 Aug 2006 23:08:59 +0200
parents 51579f15abf9 (current diff) 40059e38427a (diff)
children 10099880cf8f
files MoinMoin/macro/SystemInfo.py MoinMoin/macro/__init__.py
diffstat 15 files changed, 252 insertions(+), 131 deletions(-)
--- a/MoinMoin/Page.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/Page.py	Mon Aug 07 23:08:59 2006 +0200
@@ -1385,17 +1385,21 @@
     def execute(self, request, parser, code):
         """ Write page content by executing cache code """            
         formatter = self.formatter
-        from MoinMoin.macro import Macro
-        macro_obj = Macro(parser)        
-        # Fix __file__ when running from a zip package
-        import MoinMoin
-        if hasattr(MoinMoin, '__loader__'):
-            __file__ = os.path.join(MoinMoin.__loader__.archive, 'dummy')
-
+        request.clock.start("Page.execute")
         try:
-            exec code
-        except "CacheNeedsUpdate": # convert the exception
-            raise Exception("CacheNeedsUpdate")
+            from MoinMoin.macro import Macro
+            macro_obj = Macro(parser)        
+            # Fix __file__ when running from a zip package
+            import MoinMoin
+            if hasattr(MoinMoin, '__loader__'):
+                __file__ = os.path.join(MoinMoin.__loader__.archive, 'dummy')
+    
+            try:
+                exec code
+            except "CacheNeedsUpdate": # convert the exception
+                raise Exception("CacheNeedsUpdate")
+        finally:
+            request.clock.stop("Page.execute")
 
     def loadCache(self, request):
         """ Return page content cache or raises 'CacheNeedsUpdate' """
@@ -1407,8 +1411,6 @@
         import marshal
         try:
             return marshal.loads(cache.content())
-        except "CacheNeedsUpdate": # convert old exception into a new one
-            raise Exception('CacheNeedsUpdate')
         except (EOFError, ValueError, TypeError):
             # Bad marshal data, must update the cache.
             # See http://docs.python.org/lib/module-marshal.html
--- a/MoinMoin/PageEditor.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/PageEditor.py	Mon Aug 07 23:08:59 2006 +0200
@@ -18,6 +18,12 @@
 from MoinMoin.mail import sendmail
 
 
+# used for merging
+conflict_markers = ("\n---- /!\ '''Edit conflict - other version:''' ----\n",
+                    "\n---- /!\ '''Edit conflict - your version:''' ----\n",
+                    "\n---- /!\ '''End of edit conflict''' ----\n")
+
+
 #############################################################################
 ### Javascript code for editor page
 #############################################################################
@@ -101,10 +107,7 @@
         
         # And try to merge all into one with edit conflict separators
         verynewtext = diff3.text_merge(original_text, saved_text, savetext,
-                                       allow_conflicts,
-                                       "\n---- /!\ '''Edit conflict - other version:''' ----\n",
-                                       "\n---- /!\ '''Edit conflict - your version:''' ----\n",
-                                       "\n---- /!\ '''End of edit conflict''' ----\n")
+                                       allow_conflicts, *conflict_markers)
         if verynewtext:
             self.set_raw_body(verynewtext)
             return True
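Factoring the separators into the module-level conflict_markers tuple lets both PageEditor.mergeChanges() and the new SyncPages code pass the same three markers to diff3.text_merge() via argument unpacking. A small sketch of the call pattern (the page texts are made up for illustration):

    from MoinMoin.util import diff3
    from MoinMoin.PageEditor import conflict_markers

    original_text = u"common ancestor\n"   # revision both sides started from
    saved_text = u"their version\n"        # what is stored in the wiki now
    savetext = u"my version\n"             # what the user submitted
    allow_conflicts = 1

    # same call as in mergeChanges() above
    verynewtext = diff3.text_merge(original_text, saved_text, savetext,
                                   allow_conflicts, *conflict_markers)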
--- a/MoinMoin/action/SyncPages.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Mon Aug 07 23:08:59 2006 +0200
@@ -23,26 +23,43 @@
 
 from MoinMoin import wikiutil, config, user
 from MoinMoin.packages import unpackLine, packLine
-from MoinMoin.PageEditor import PageEditor
+from MoinMoin.PageEditor import PageEditor, conflict_markers
 from MoinMoin.Page import Page
 from MoinMoin.wikidicts import Dict, Group
+from MoinMoin.wikisync import TagStore
+from MoinMoin.util.bdiff import decompress, patch, compress, textdiff
+from MoinMoin.util import diff3
 
 # directions
 UP, DOWN, BOTH = range(3)
 directions_map = {"up": UP, "down": DOWN, "both": BOTH}
 
+
+def normalise_pagename(page_name, prefix):
+    if prefix:
+        if not page_name.startswith(prefix):
+            return None
+        else:
+            return page_name[len(prefix):]
+    else:
+        return page_name
+
+
 class ActionStatus(Exception): pass
 
 class UnsupportedWikiException(Exception): pass
 
-# Move these classes to MoinMoin.wikisync
+# XXX Move these classes to MoinMoin.wikisync
 class SyncPage(object):
-    """ This class represents a page in (another) wiki. """
-    def __init__(self, name, local_rev=None, remote_rev=None):
+    """ This class represents a page in one or two wiki(s). """
+    def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None):
         self.name = name
         self.local_rev = local_rev
         self.remote_rev = remote_rev
+        self.local_name = local_name
+        self.remote_name = remote_name
         assert local_rev or remote_rev
+        assert local_name or remote_name
 
     def __repr__(self):
         return repr("<Remote Page %r>" % unicode(self))
@@ -61,6 +78,18 @@
             return false
         return self.name == other.name
 
+    def add_missing_pagename(self, local, remote):
+        if self.local_name is None:
+            n_name = normalise_pagename(self.remote_name, remote.prefix)
+            assert n_name is not None
+            self.local_name = (local.prefix or "") + n_name
+        elif self.remote_name is None:
+            n_name = normalise_pagename(self.local_name, local.prefix)
+            assert n_name is not None
+            self.remote_name = (remote.prefix or "") + n_name
+
+        return self # makes using list comps easier
+
     def filter(cls, sp_list, func):
         return [x for x in sp_list if func(x.name)]
     filter = classmethod(filter)
@@ -71,6 +100,7 @@
         for sp in remote_list:
             if sp in d:
                 d[sp].remote_rev = sp.remote_rev
+                d[sp].remote_name = sp.remote_name
             else:
                 d[sp] = sp
         return d.keys()
@@ -111,20 +141,26 @@
         """ Returns a representation of the instance for debugging purposes. """
         return NotImplemented
 
-    def getInterwikiName(self):
+    def get_interwiki_name(self):
         """ Returns the interwiki name of the other wiki. """
         return NotImplemented
 
-    def getPages(self):
+    def get_iwid(self):
+        """ Returns the InterWiki ID. """
+        return NotImplemented
+
+    def get_pages(self):
         """ Returns a list of SyncPage instances. """
         return NotImplemented
 
 
 class MoinRemoteWiki(RemoteWiki):
     """ Used for MoinMoin wikis reachable via XMLRPC. """
-    def __init__(self, request, interwikiname):
+    def __init__(self, request, interwikiname, prefix):
         self.request = request
+        self.prefix = prefix
         _ = self.request.getText
+
         wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, ))
         self.wiki_url = wikiutil.mapURL(self.request, wikiurl)
         self.valid = not wikitag_bad
@@ -132,11 +168,14 @@
         if not self.valid:
             self.connection = None
             return
+
         self.connection = self.createConnection()
+
         version = self.connection.getMoinVersion()
         if not isinstance(version, (tuple, list)):
             raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least."))
-        remote_interwikiname = self.getInterwikiName()
+
+        remote_interwikiname = self.get_interwiki_name()
         remote_iwid = self.connection.interwikiName()[1]
         self.is_anonymous = remote_interwikiname is None
         if not self.is_anonymous and interwikiname != remote_interwikiname:
@@ -152,13 +191,26 @@
     def createConnection(self):
         return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
 
+    # Public methods
+    def get_diff(self, pagename, from_rev, to_rev):
+        return str(self.connection.getDiff(pagename, from_rev, to_rev))
+
     # Methods implementing the RemoteWiki interface
-    def getInterwikiName(self):
+    def get_interwiki_name(self):
         return self.connection.interwikiName()[0]
 
-    def getPages(self):
+    def get_iwid(self):
+        return self.connection.interwikiName()[1]
+
+    def get_pages(self):
         pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True})
-        return [SyncPage(unicode(name), remote_rev=revno) for name, revno in pages]
+        rpages = []
+        for name, revno in pages:
+            normalised_name = normalise_pagename(name, self.prefix)
+            if normalised_name is None:
+                continue
+            rpages.append(SyncPage(normalised_name, remote_rev=revno, remote_name=name))
+        return rpages
 
     def __repr__(self):
         return "<MoinRemoteWiki wiki_url=%r valid=%r>" % (self.wiki_url, self.valid)
@@ -166,24 +218,34 @@
 
 class MoinLocalWiki(RemoteWiki):
     """ Used for the current MoinMoin wiki. """
-    def __init__(self, request):
+    def __init__(self, request, prefix):
         self.request = request
+        self.prefix = prefix
 
     def getGroupItems(self, group_list):
+        """ Returns all page names that are listed on the page group_list. """
         pages = []
         for group_pagename in group_list:
             pages.extend(Group(self.request, group_pagename).members())
         return [self.createSyncPage(x) for x in pages]
 
     def createSyncPage(self, page_name):
-        return SyncPage(page_name, local_rev=Page(self.request, page_name).get_real_rev())
+        normalised_name = normalise_pagename(page_name, self.prefix)
+        if normalised_name is None:
+            return None
+        return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name)
+
+    # Public methods:
 
     # Methods implementing the RemoteWiki interface
-    def getInterwikiName(self):
+    def get_interwiki_name(self):
         return self.request.cfg.interwikiname
 
-    def getPages(self):
-        return [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)]
+    def get_iwid(self):
+        return self.request.cfg.iwid
+
+    def get_pages(self):
+        return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] if x]
 
     def __repr__(self):
         return "<MoinLocalWiki>"
@@ -252,9 +314,9 @@
             if not params["remoteWiki"]:
                 raise ActionStatus(_("Incorrect parameters. Please supply at least the ''remoteWiki'' parameter."))
 
-            local = MoinLocalWiki(self.request)
+            local = MoinLocalWiki(self.request, params["localPrefix"])
             try:
-                remote = MoinRemoteWiki(self.request, params["remoteWiki"])
+                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"])
             except UnsupportedWikiException, (msg, ):
                 raise ActionStatus(msg)
 
@@ -269,16 +331,16 @@
     
     def sync(self, params, local, remote):
         """ This method does the syncronisation work. """
-        
-        r_pages = remote.getPages()
-        l_pages = local.getPages()
+
+        l_pages = local.get_pages()
+        r_pages = remote.get_pages()
 
         if params["groupList"]:
             pages_from_groupList = set(local.getGroupItems(params["groupList"]))
             r_pages = SyncPage.filter(r_pages, pages_from_groupList.__contains__)
             l_pages = SyncPage.filter(l_pages, pages_from_groupList.__contains__)
 
-        m_pages = SyncPage.merge(l_pages, r_pages)
+        m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)]
 
         print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))
         
@@ -290,15 +352,66 @@
         remote_but_not_local = list(SyncPage.iter_remote_only(m_pages))
         local_but_not_remote = list(SyncPage.iter_local_only(m_pages))
         
-        # some initial test code
-        r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local])
-        l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote])
-        raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "<br>These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages))
+        # some initial test code (XXX remove)
+        #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local])
+        #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote])
+        #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "<br>These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages))
         #if params["direction"] in (DOWN, BOTH):
         #    for rp in remote_but_not_local:
-                # XXX add locking, acquire read-lock on rp
-                
 
+        # let's do the simple case first, can be refactored later to match all cases
+        # XXX handle deleted pages
+        for rp in on_both_sides:
+            # XXX add locking, acquire read-lock on rp
+
+            local_pagename = rp.local_name
+            current_page = PageEditor(self.request, local_pagename)
+            current_rev = current_page.get_real_rev()
+
+            tags = TagStore(current_page)
+            matching_tags = tags.fetch(iwid_full=remote.iwid_full)
+            matching_tags.sort()
+
+            if not matching_tags:
+                remote_rev = None
+                local_rev = rp.local_rev # merge against the newest version
+                old_contents = ""
+            else:
+                newest_tag = matching_tags[-1]
+                local_rev = newest_tag.current_rev
+                remote_rev = newest_tag.remote_rev
+                if remote_rev == rp.remote_rev and local_rev == current_rev:
+                    continue # no changes done, next page
+                old_page = Page(self.request, local_pagename, rev=local_rev)
+                old_contents = old_page.get_raw_body_str()
+
+            diff_result = remote.get_diff(rp.remote_name, remote_rev, None)
+            is_remote_conflict = diff_result["conflict"]
+            assert diff_result["diffversion"] == 1
+            diff = diff_result["diff"]
+            current_remote_rev = diff_result["current"]
+
+            if remote_rev is None: # set the remote_rev for the case without any tags
+                remote_rev = current_remote_rev
+
+            new_contents = patch(old_contents, decompress(diff)).decode("utf-8")
+            old_contents = old_contents.decode("utf-8")
+
+            # here, the actual merge happens
+            verynewtext = diff3.text_merge(old_contents, new_contents, current_page.get_raw_body(), 1, *conflict_markers)
+
+            new_local_rev = current_rev + 1 # XXX commit first?
+            local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
+            remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()])
+            # XXX add remote conflict handling
+            very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(textdiff(new_contents, verynewtext)), new_local_rev, remote_rev, current_remote_rev, local_full_iwid)
+            tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev)
+            comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
+            try:
+                current_page.saveText(verynewtext, current_rev, comment=comment)
+            except PageEditor.EditConflict:
+                assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"
+            # XXX untested
 
 def execute(pagename, request):
     ActionClass(pagename, request).render()
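A short example of the new prefix handling: normalise_pagename() strips a configured prefix (or rejects pages outside it), and SyncPage.add_missing_pagename() reconstructs the missing local or remote name from the other side. The names, prefixes, and the _Wiki stand-in below are invented for illustration:

    from MoinMoin.action.SyncPages import SyncPage, normalise_pagename

    normalise_pagename(u"Sync/FrontPage", u"Sync/")   # -> u"FrontPage"
    normalise_pagename(u"LocalOnlyPage", u"Sync/")    # -> None (outside the prefix, skipped)
    normalise_pagename(u"AnyPage", None)              # -> u"AnyPage" (no prefix configured)

    class _Wiki(object):
        """ Stand-in carrying just the prefix attribute used by add_missing_pagename. """
        def __init__(self, prefix):
            self.prefix = prefix

    local, remote = _Wiki(u"Sync/"), _Wiki(u"Pub/")
    sp = SyncPage(u"FrontPage", remote_rev=7, remote_name=u"Pub/FrontPage")
    sp.add_missing_pagename(local, remote)
    # sp.local_name is now u"Sync/FrontPage"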
--- a/MoinMoin/action/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/action/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -29,9 +29,11 @@
 from MoinMoin import wikiutil
 from MoinMoin.Page import Page
 
-# create a list of extension actions from the subpackage directory
-extension_actions = pysupport.getPackageModules(__file__)
-modules = extension_actions
+# create a list of extension actions from the package directory
+modules = pysupport.getPackageModules(__file__)
+
+# builtin-stuff (see do_<name> below):
+names = ['show', 'recall', 'raw', 'format', 'content', 'print', 'refresh', 'goto', 'userform', ]
 
 class ActionBase:
     """ action base class with some generic stuff to inherit
@@ -284,15 +286,14 @@
     Page(request, pagename).send_page(request, msg=savemsg)
 
 # Dispatching ----------------------------------------------------------------
-import os
-
-def getPlugins(request):
-    """ return the path to the action plugin directory and a list of plugins there """
-    dir = os.path.join(request.cfg.plugin_dir, 'action')
-    plugins = []
-    if os.path.isdir(dir):
-        plugins = pysupport.getPackageModules(os.path.join(dir, 'dummy'))
-    return dir, plugins
+def getNames(cfg):
+    if hasattr(cfg, 'action_names'):
+        return cfg.action_names
+    else:
+        lnames = names[:]
+        lnames.extend(wikiutil.getPlugins('action', cfg))
+        cfg.action_names = lnames # remember it
+        return lnames
 
 def getHandler(request, action, identifier="execute"):
     """ return a handler function for a given action or None """
--- a/MoinMoin/converter/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/converter/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -6,8 +6,3 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-from MoinMoin.util import pysupport
-
-# create a list of extension converters from the subpackage directory
-extension_converters = pysupport.getPackageModules(__file__)
-modules = extension_converters
--- a/MoinMoin/filter/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/filter/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -9,8 +9,7 @@
 import os
 from MoinMoin.util import pysupport
 
-filters = pysupport.getPackageModules(__file__)
-modules = filters
+modules = pysupport.getPackageModules(__file__)
 
 standard_codings = ['utf-8', 'iso-8859-15', 'iso-8859-1', ]
 
--- a/MoinMoin/logfile/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/logfile/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -6,10 +6,6 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-from MoinMoin.util import pysupport
-
-logfiles = pysupport.getPackageModules(__file__)
-
 import os, codecs, errno
 from MoinMoin import config, wikiutil
 
--- a/MoinMoin/macro/SystemInfo.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/macro/SystemInfo.py	Mon Aug 07 23:08:59 2006 +0200
@@ -97,15 +97,16 @@
     nonestr = _("NONE")
     row('Event log', _formatInReadableUnits(eventlogger.size()))
 
-    row(_('Global extension macros'), ', '.join(macro.extension_macros) or nonestr)
+    row(_('Global extension macros'), ', '.join(macro.modules) or nonestr)
     row(_('Local extension macros'),
         ', '.join(wikiutil.wikiPlugins('macro', Macro.cfg)) or nonestr)
 
-    ext_actions = [x for x in action.extension_actions
+    glob_actions = [x for x in action.modules
+                    if not x in request.cfg.actions_excluded]
+    row(_('Global extension actions'), ', '.join(glob_actions) or nonestr)
+    loc_actions = [x for x in wikiutil.wikiPlugins('action', Macro.cfg)
                    if not x in request.cfg.actions_excluded]
-    row(_('Global extension actions'), ', '.join(ext_actions) or nonestr)
-    row(_('Local extension actions'),
-        ', '.join(action.getPlugins(request)[1]) or nonestr)
+    row(_('Local extension actions'), ', '.join(loc_actions) or nonestr)
 
     row(_('Global parsers'), ', '.join(parser.modules) or nonestr)
     row(_('Local extension parsers'),
--- a/MoinMoin/macro/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/macro/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -17,9 +17,7 @@
 """
 
 from MoinMoin.util import pysupport
-
-extension_macros = pysupport.getPackageModules(__file__)
-modules = extension_macros
+modules = pysupport.getPackageModules(__file__)
 
 import re, time, os
 from MoinMoin import action, config, util
--- a/MoinMoin/parser/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/parser/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -9,5 +9,5 @@
     @license: GNU GPL, see COPYING for details.
 """
 from MoinMoin.util import pysupport
+modules = pysupport.getPackageModules(__file__)
 
-modules = pysupport.getPackageModules(__file__)
--- a/MoinMoin/request/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/request/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -28,42 +28,46 @@
 class Clock:
     """ Helper class for code profiling
         we do not use time.clock() as this does not work across threads
+        This is not thread-safe when the same timer is started from multiple
+        threads. The start and stop methods may be called recursively; just
+        make sure that every start is matched by a stop.
     """
 
     def __init__(self):
         self.timings = {}
         self.states = {}
 
+    def _get_name(timer, generation):
+        if generation == 0:
+            return timer
+        else:
+            return "%s|%i" % (timer, generation)
+    _get_name = staticmethod(_get_name)
+
     def start(self, timer):
-        state = self.states.setdefault(timer, 'new')
-        if state == 'new':
-            self.timings[timer] = time.time()
-            self.states[timer] = 'running'
-        elif state == 'running':
-            pass # this timer is already running, do nothing
-        elif state == 'stopped':
-            # if a timer is stopped, timings has the sum of all times it was running
-            self.timings[timer] = time.time() - self.timings[timer]
-            self.states[timer] = 'running'
+        state = self.states.setdefault(timer, -1)
+        new_level = state + 1
+        name = Clock._get_name(timer, new_level)
+        self.timings[name] = time.time() - self.timings.get(name, 0)
+        self.states[timer] = new_level
 
     def stop(self, timer):
-        state = self.states.setdefault(timer, 'neverstarted')
-        if state == 'running':
-            self.timings[timer] = time.time() - self.timings[timer]
-            self.states[timer] = 'stopped'
-        elif state == 'stopped':
-            pass # this timer already has been stopped, do nothing
-        elif state == 'neverstarted':
-            pass # this timer never has been started, do nothing
+        state = self.states.setdefault(timer, -1)
+        if state >= 0: # timer is active
+            name = Clock._get_name(timer, state)
+            self.timings[name] = time.time() - self.timings[name]
+            self.states[timer] = state - 1
 
     def value(self, timer):
-        state = self.states.setdefault(timer, 'nosuchtimer')
-        if state == 'stopped':
+        base_timer = timer.split("|")[0]
+        state = self.states.get(base_timer, None)
+        if state == -1:
             result = "%.3fs" % self.timings[timer]
-        elif state == 'running':
+        elif state is None:
+            result = "- (%s)" % state
+        else:
+            print "Got state %r" % state
             result = "%.3fs (still running)" % (time.time() - self.timings[timer])
-        else:
-            result = "- (%s)" % state
         return result
 
     def dump(self):
@@ -663,21 +667,9 @@
             self.cfg._known_actions # check
         except AttributeError:
             from MoinMoin import action
-            # Add built in actions
-            actions = [name[3:] for name in action.__dict__ if name.startswith('do_')]
-
-            # Add plugins           
-            dummy, plugins = action.getPlugins(self)
-            actions.extend(plugins)
+            self.cfg._known_actions = set(action.getNames(self.cfg))
 
-            # Add extensions
-            actions.extend(action.extension_actions)
-
-            # TODO: Use set when we require Python 2.3
-            actions = dict(zip(actions, [''] * len(actions)))
-            self.cfg._known_actions = actions
-
-        # Return a copy, so clients will not change the dict.
+        # Return a copy, so clients will not change the set.
         return self.cfg._known_actions.copy()
 
     def getAvailableActions(self, page):
@@ -698,14 +690,10 @@
 
             # Filter non ui actions (starts with lower case letter)
             actions = self.getKnownActions()
-            for key in actions.keys():
-                if key[0].islower():
-                    del actions[key]
+            actions = [action for action in actions if not action[0].islower()]
 
             # Filter wiki excluded actions
-            for key in self.cfg.actions_excluded:
-                if key in actions:
-                    del actions[key]
+            actions = [action for action in actions if not action in self.cfg.actions_excluded]
 
             # Filter actions by page type, acl and user state
             excluded = []
@@ -715,11 +703,9 @@
                 # Prevent modification of underlay only pages, or pages
                 # the user can't write and can't delete
                 excluded = [u'RenamePage', u'DeletePage', ] # AttachFile must NOT be here!
-            for key in excluded:
-                if key in actions:
-                    del actions[key]
+            actions = [action for action in actions if not action in excluded]
 
-            self._available_actions = actions
+            self._available_actions = set(actions)
 
         # Return a copy, so clients will not change the dict.
         return self._available_actions.copy()
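The reworked Clock keeps an integer nesting level per timer name and derives a "|generation" key for nested starts, so re-entrant calls are timed separately instead of being ignored. A short usage sketch (the timer name is invented):

    from MoinMoin.request import Clock

    clock = Clock()
    clock.start("render")    # level 0, recorded under the key "render"
    clock.start("render")    # nested start, level 1, key "render|1"
    clock.stop("render")     # stops the innermost run ("render|1")
    clock.stop("render")     # stops the outer run ("render")
    print clock.value("render")      # e.g. "0.000s" -- outer run
    print clock.value("render|1")    # e.g. "0.000s" -- nested run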
--- a/MoinMoin/script/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/script/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -7,12 +7,6 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-from MoinMoin.util import pysupport
-
-# create a list of extension scripts from the subpackage directory
-extension_scripts = pysupport.getPackageModules(__file__)
-modules = extension_scripts
-
 import os, sys, time
 
 flag_quiet = 0
--- a/MoinMoin/wikisync.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/wikisync.py	Mon Aug 07 23:08:59 2006 +0200
@@ -14,6 +14,7 @@
     import pickle
 
 from MoinMoin.util import lock
+from MoinMoin.packages import unpackLine
 
 
 class Tag(object):
@@ -33,6 +34,11 @@
     def __repr__(self):
         return u"<Tag remote_wiki=%r remote_rev=%r current_rev=%r>" % (self.remote_wiki, self.remote_rev, self.current_rev)
 
+    def __cmp__(self, other):
+        if not isinstance(other, Tag):
+            return NotImplemented
+        return cmp(self.current_rev, other.current_rev)
+
 
 class AbstractTagStore(object):
     """ This class is an abstract base class that shows how to implement classes
@@ -56,6 +62,10 @@
         """ Removes all tags. """
         return NotImplemented
 
+    def fetch(self, iwid_full=None, iw_name=None):
+        """ Fetches tags by a special IWID or interwiki name. """
+        return NotImplemented
+
 
 class PickleTagStore(AbstractTagStore):
     """ This class manages the storage of tags in pickle files. """
@@ -111,6 +121,17 @@
         self.tags = []
         self.commit()
 
+    def fetch(self, iwid_full=None, iw_name=None):
+        assert bool(iwid_full) ^ bool(iw_name)
+        if iwid_full:
+            iwid_full = unpackLine(iwid_full)
+            if len(iwid_full) == 1:
+                assert False, "This case is not supported yet" # XXX
+            iw_name = iwid_full[1]
+
+        return [t for t in self.tags if t.remote_wiki == iw_name]
+
+
 # currently we just have one implementation, so we do not need
 # a factory method
 TagStore = PickleTagStore
\ No newline at end of file
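TagStore.fetch() selects the stored tags whose remote_wiki field matches the interwiki name unpacked from the given full IWID; SyncPages sorts the result and treats the newest tag as the common merge base. A hedged sketch of that usage (newest_sync_tag and its parameters are made-up names):

    from MoinMoin.packages import packLine
    from MoinMoin.wikisync import TagStore

    def newest_sync_tag(page, remote_iwid, remote_name):
        # page: a MoinMoin Page object; remote_iwid/remote_name: as returned
        # by the remote wiki's interwikiName() XMLRPC call.
        tags = TagStore(page)
        matching = tags.fetch(iwid_full=packLine([remote_iwid, remote_name]))
        matching.sort()                      # Tag.__cmp__ orders by current_rev
        return matching and matching[-1] or None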
--- a/MoinMoin/xmlrpc/__init__.py	Sun Aug 06 19:55:05 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Mon Aug 07 23:08:59 2006 +0200
@@ -617,7 +617,7 @@
         else:
             return [self._outstr(name), iwid]
 
-    def xmlrpc_mergeChanges(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name):
+    def xmlrpc_mergeDiff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name):
         """ Merges a diff sent by the remote machine and returns the number of the new revision.
             Additionally, this method tags the new revision.
             
@@ -634,7 +634,7 @@
 
         pagename = self._instr(pagename)
 
-        comment = u"Remote - %r" % interwiki_name
+        comment = u"Remote Merge - %r" % interwiki_name
         
         # User may read page?
         if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename):
--- a/docs/CHANGES.aschremmer	Sun Aug 06 19:55:05 2006 +0200
+++ b/docs/CHANGES.aschremmer	Mon Aug 07 23:08:59 2006 +0200
@@ -8,11 +8,13 @@
 
   ToDo:
     * Implement actual syncronisation.
+      * Add correct IWID_full handling.
     * Implement a cross-site authentication system, i.e. mainly an
       identity storage.
     * Clean up trailing whitespace.
     * Add page locking, i.e. use the one in the new storage layer.
     * Check what needs to be documented on MoinMaster.
+    * Search for XXX
 
   New Features:
     * XMLRPC method to return the Moin version
@@ -83,6 +85,8 @@
          fixed option handling again, refined semantics of options, introduced
          direction option, replaced "localMatch"/"remoteMatch" by "pageMatch".
          Store mtime for InterWiki list updates and detect changes based on it.
+         Added support for localPrefix and remotePrefix.
+Week 32: Continued work on the merge logic, finished prefix handling.
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
@@ -96,6 +100,14 @@
 2006-07-27: student didnt work on project
 2006-07-28: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-29: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-30: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-31: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-08-01: student didn't work on project
+2006-08-02: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-08-03: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-08-04: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-08-05: student didn't work on project
+2006-08-06: student didn't work on project -- a Sunday
 
 Time plan
 =========