changeset 1284:82d1f983bcfb

merged xapian branch
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Tue, 15 Aug 2006 11:29:45 +0200
parents ed68b5d6f47e (diff) e26799c574b0 (current diff)
children 1ed1a0ac1715
diffstat 48 files changed, 868 insertions(+), 718 deletions(-)
--- a/MoinMoin/Page.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/Page.py	Tue Aug 15 11:29:45 2006 +0200
@@ -636,7 +636,7 @@
 
         return count
 
-    def getPageList(self, user=None, exists=1, filter=None):
+    def getPageList(self, user=None, exists=1, filter=None, include_underlay=True):
         """ List user readable pages under current page
 
         Currently only request.rootpage is used to list pages, but if we
@@ -693,8 +693,14 @@
                 if filter and not filter(name):
                     continue
 
+                page = Page(request, name)
+
+                # Filter underlay pages
+                if not include_underlay and page.getPageStatus()[0]: # is an underlay page
+                    continue
+
                 # Filter deleted pages
-                if exists and not Page(request, name).exists():
+                if exists and not page.exists():
                     continue
 
                 # Filter out page user may not read.
@@ -1649,7 +1655,7 @@
         try:
             lastRevision = self.getRevList()[0]
         except IndexError:
-            return security.AccessControlList(self.request)
+            return security.AccessControlList(self.request.cfg)
         body = Page(self.request, self.page_name,
                     rev=lastRevision).get_raw_body()
         return security.parseACL(self.request, body)
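The new include_underlay keyword lets callers drop underlay pages from the listing; as the hunk above shows, a page counts as underlay when getPageStatus()[0] is true. A minimal usage sketch, assuming the usual MoinMoin request object:

    # list only pages living in the standard data directory,
    # skipping the read-only underlay copies
    data_pages = request.rootpage.getPageList(include_underlay=False)

    # the default (include_underlay=True) keeps the old behaviour
    all_pages = request.rootpage.getPageList()
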
--- a/MoinMoin/PageEditor.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/PageEditor.py	Tue Aug 15 11:29:45 2006 +0200
@@ -808,6 +808,9 @@
 
         self.copypage()
 
+        # remember conflict state
+        self.setConflict(wikiutil.containsConflictMarker(text))
+
         # Write always on the standard directory, never change the
         # underlay directory copy!
         pagedir = self.getPagePath(use_underlay=0, check_create=0)
--- a/MoinMoin/_tests/test_security.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/_tests/test_security.py	Tue Aug 15 11:29:45 2006 +0200
@@ -204,7 +204,7 @@
             "BadGuy:  "
             "All:read  "
             ]
-        acl = security.AccessControlList(self.request, acl_rights)
+        acl = security.AccessControlList(self.request.cfg, acl_rights)
 
         # Should apply these rights:
         users = (
--- a/MoinMoin/_tests/test_wikisync.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/_tests/test_wikisync.py	Tue Aug 15 11:29:45 2006 +0200
@@ -11,7 +11,7 @@
 from MoinMoin.PageEditor import PageEditor
 from MoinMoin._tests import TestConfig, TestSkipped
 
-from MoinMoin.wikisync import TagStore
+from MoinMoin.wikisync import TagStore, BOTH
 
 
 class UnsafeSyncTestcase(TestCase):
@@ -27,7 +27,7 @@
     def testBasicTagThings(self):
         tags = TagStore(self.page)
         self.assert_(not tags.get_all_tags())
-        tags.add(remote_wiki="foo", remote_rev=1, current_rev=2)
+        tags.add(remote_wiki="foo", remote_rev=1, current_rev=2, direction=BOTH, normalised_name="FrontPage")
         tags = TagStore(self.page) # reload
         self.assert_(tags.get_all_tags()[0].remote_rev == 1)
     
--- a/MoinMoin/action/SpellCheck.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/action/SpellCheck.py	Tue Aug 15 11:29:45 2006 +0200
@@ -99,7 +99,6 @@
 
 
 def _addLocalWords(request):
-    import types
     from MoinMoin.PageEditor import PageEditor
 
     # get the new words as a string (if any are marked at all)
--- a/MoinMoin/action/SyncPages.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Tue Aug 15 11:29:45 2006 +0200
@@ -10,9 +10,7 @@
 
 import os
 import re
-import zipfile
 import xmlrpclib
-from datetime import datetime
 
 # Compatiblity to Python 2.3
 try:
@@ -26,238 +24,35 @@
 from MoinMoin.PageEditor import PageEditor, conflict_markers
 from MoinMoin.Page import Page
 from MoinMoin.wikidicts import Dict, Group
-from MoinMoin.wikisync import TagStore
+from MoinMoin.wikisync import (TagStore, UnsupportedWikiException, SyncPage,
+                               MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH)
 from MoinMoin.util.bdiff import decompress, patch, compress, textdiff
 from MoinMoin.util import diff3
 
-# directions
-UP, DOWN, BOTH = range(3)
-directions_map = {"up": UP, "down": DOWN, "both": BOTH}
-
 
-def normalise_pagename(page_name, prefix):
-    if prefix:
-        if not page_name.startswith(prefix):
-            return None
-        else:
-            return page_name[len(prefix):]
-    else:
-        return page_name
+# map sync directions
+directions_map = {"up": UP, "down": DOWN, "both": BOTH}
 
 
 class ActionStatus(Exception): pass
 
-class UnsupportedWikiException(Exception): pass
-
-# XXX Move these classes to MoinMoin.wikisync
-class SyncPage(object):
-    """ This class represents a page in one or two wiki(s). """
-    def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None):
-        self.name = name
-        self.local_rev = local_rev
-        self.remote_rev = remote_rev
-        self.local_name = local_name
-        self.remote_name = remote_name
-        assert local_rev or remote_rev
-        assert local_name or remote_name
-
-    def __repr__(self):
-        return repr("<Remote Page %r>" % unicode(self))
-
-    def __unicode__(self):
-        return u"%s<%r:%r>" % (self.name, self.local_rev, self.remote_rev)
-
-    def __lt__(self, other):
-        return self.name < other.name
-
-    def __hash__(self):
-        return hash(self.name)
-
-    def __eq__(self, other):
-        if not isinstance(other, SyncPage):
-            return false
-        return self.name == other.name
-
-    def add_missing_pagename(self, local, remote):
-        if self.local_name is None:
-            n_name = normalise_pagename(self.remote_name, remote.prefix)
-            assert n_name is not None
-            self.local_name = (local.prefix or "") + n_name
-        elif self.remote_name is None:
-            n_name = normalise_pagename(self.local_name, local.prefix)
-            assert n_name is not None
-            self.remote_name = (local.prefix or "") + n_name
-
-        return self # makes using list comps easier
-
-    def filter(cls, sp_list, func):
-        return [x for x in sp_list if func(x.name)]
-    filter = classmethod(filter)
-
-    def merge(cls, local_list, remote_list):
-        # map page names to SyncPage objects :-)
-        d = dict(zip(local_list, local_list))
-        for sp in remote_list:
-            if sp in d:
-                d[sp].remote_rev = sp.remote_rev
-                d[sp].remote_name = sp.remote_name
-            else:
-                d[sp] = sp
-        return d.keys()
-    merge = classmethod(merge)
-
-    def is_only_local(self):
-        return not self.remote_rev
-
-    def is_only_remote(self):
-        return not self.local_rev
-
-    def is_local_and_remote(self):
-        return self.local_rev and self.remote_rev
-
-    def iter_local_only(cls, sp_list):
-        for x in sp_list:
-            if x.is_only_local():
-                yield x
-    iter_local_only = classmethod(iter_local_only)
-
-    def iter_remote_only(cls, sp_list):
-        for x in sp_list:
-            if x.is_only_remote():
-                yield x
-    iter_remote_only = classmethod(iter_remote_only)
-
-    def iter_local_and_remote(cls, sp_list):
-        for x in sp_list:
-            if x.is_local_and_remote():
-                yield x
-    iter_local_and_remote = classmethod(iter_local_and_remote)
-
-class RemoteWiki(object):
-    """ This class should be the base for all implementations of remote wiki
-        classes. """
-
-    def __repr__(self):
-        """ Returns a representation of the instance for debugging purposes. """
-        return NotImplemented
-
-    def get_interwiki_name(self):
-        """ Returns the interwiki name of the other wiki. """
-        return NotImplemented
-
-    def get_iwid(self):
-        """ Returns the InterWiki ID. """
-        return NotImplemented
-
-    def get_pages(self):
-        """ Returns a list of SyncPage instances. """
-        return NotImplemented
-
-
-class MoinRemoteWiki(RemoteWiki):
-    """ Used for MoinMoin wikis reachable via XMLRPC. """
-    def __init__(self, request, interwikiname, prefix):
-        self.request = request
-        self.prefix = prefix
-        _ = self.request.getText
-
-        wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, ))
-        self.wiki_url = wikiutil.mapURL(self.request, wikiurl)
-        self.valid = not wikitag_bad
-        self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2"
-        if not self.valid:
-            self.connection = None
-            return
-
-        self.connection = self.createConnection()
-
-        version = self.connection.getMoinVersion()
-        if not isinstance(version, (tuple, list)):
-            raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least."))
-
-        remote_interwikiname = self.get_interwiki_name()
-        remote_iwid = self.connection.interwikiName()[1]
-        self.is_anonymous = remote_interwikiname is None
-        if not self.is_anonymous and interwikiname != remote_interwikiname:
-            raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)"
-                                             " internally than you specified (%(localname)s).") % {
-                "remotename": wikiutil.escape(remote_interwikiname), "localname": wikiutil.escape(interwikiname)})
-
-        if self.is_anonymous:
-            self.iwid_full = packLine([remote_iwid])
-        else:
-            self.iwid_full = packLine([remote_iwid, interwikiname])
-
-    def createConnection(self):
-        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
-
-    # Public methods
-    def get_diff(self, pagename, from_rev, to_rev):
-        return str(self.connection.getDiff(pagename, from_rev, to_rev))
-
-    # Methods implementing the RemoteWiki interface
-    def get_interwiki_name(self):
-        return self.connection.interwikiName()[0]
-
-    def get_iwid(self):
-        return self.connection.interwikiName()[1]
-
-    def get_pages(self):
-        pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True})
-        rpages = []
-        for name, revno in pages:
-            normalised_name = normalise_pagename(name, self.prefix)
-            if normalised_name is None:
-                continue
-            rpages.append(SyncPage(normalised_name, remote_rev=revno, remote_name=name))
-        return rpages
-
-    def __repr__(self):
-        return "<MoinRemoteWiki wiki_url=%r valid=%r>" % (self.wiki_url, self.valid)
-
-
-class MoinLocalWiki(RemoteWiki):
-    """ Used for the current MoinMoin wiki. """
-    def __init__(self, request, prefix):
-        self.request = request
-        self.prefix = prefix
-
-    def getGroupItems(self, group_list):
-        """ Returns all page names that are listed on the page group_list. """
-        pages = []
-        for group_pagename in group_list:
-            pages.extend(Group(self.request, group_pagename).members())
-        return [self.createSyncPage(x) for x in pages]
-
-    def createSyncPage(self, page_name):
-        normalised_name = normalise_pagename(page_name, self.prefix)
-        if normalised_name is None:
-            return None
-        return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name)
-
-    # Public methods:
-
-    # Methods implementing the RemoteWiki interface
-    def get_interwiki_name(self):
-        return self.request.cfg.interwikiname
-
-    def get_iwid(self):
-        return self.request.cfg.iwid
-
-    def get_pages(self):
-        return [x for x in [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)] if x]
-
-    def __repr__(self):
-        return "<MoinLocalWiki>"
-
 
 class ActionClass:
+    INFO, WARN, ERROR = range(3) # used for logging
+
     def __init__(self, pagename, request):
         self.request = request
         self.pagename = pagename
         self.page = Page(request, pagename)
+        self.status = []
+        request.flush()
+
+    def log_status(self, level, message):
+        """ Appends the message with a given importance level to the internal log. """
+        self.status.append((level, message))
 
     def parse_page(self):
+        """ Parses the parameter page and returns the read arguments. """
         options = {
             "remotePrefix": "",
             "localPrefix": "",
@@ -276,7 +71,7 @@
         if options["groupList"] is not None:
             options["groupList"] = unpackLine(options["groupList"], ",")
 
-        options["direction"] = directions_map.get(options["direction"], BOTH)
+        options["direction"] = directions_map.get(options["direction"].lower(), BOTH)
 
         return options
 
@@ -287,7 +82,6 @@
         if params["pageList"] is not None:
             params["pageMatch"] = u'|'.join([r'^%s$' % re.escape(name)
                                              for name in params["pageList"]])
-            del params["pageList"]
 
         if params["pageMatch"] is not None:
             params["pageMatch"] = re.compile(params["pageMatch"], re.U)
@@ -295,6 +89,7 @@
         # we do not support matching or listing pages if there is a group of pages
         if params["groupList"]:
             params["pageMatch"] = None
+            params["pageList"] = None
 
         return params
 
@@ -308,15 +103,18 @@
         params = self.fix_params(self.parse_page())
 
         try:
+            if params["direction"] == UP:
+                raise ActionStatus(_("The only supported directions are BOTH and DOWN."))
+
             if not self.request.cfg.interwikiname:
                 raise ActionStatus(_("Please set an interwikiname in your wikiconfig (see HelpOnConfiguration) to be able to use this action."))
 
             if not params["remoteWiki"]:
                 raise ActionStatus(_("Incorrect parameters. Please supply at least the ''remoteWiki'' parameter."))
 
-            local = MoinLocalWiki(self.request, params["localPrefix"])
+            local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"])
             try:
-                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"])
+                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"])
             except UnsupportedWikiException, (msg, ):
                 raise ActionStatus(msg)
 
@@ -325,15 +123,51 @@
 
             self.sync(params, local, remote)
         except ActionStatus, e:
-            return self.page.send_page(self.request, msg=u'<p class="error">%s</p>\n' % (e.args[0], ))
+            msg = u'<p class="error">%s</p><p>%s</p>\n' % (e.args[0], repr(self.status))
+        else:
+            msg = u"%s<p>%s</p>" % (_("Syncronisation finished."), repr(self.status))
 
-        return self.page.send_page(self.request, msg=_("Syncronisation finished."))
+        # XXX append self.status to the job page
+        return self.page.send_page(self.request, msg=msg)
     
     def sync(self, params, local, remote):
-        """ This method does the syncronisation work. """
+        """ This method does the syncronisation work.
+            Currently, it handles the case where the pages exist on both sides.
+            Now there are a few other cases left that have to be implemented:
+                Wiki A    | Wiki B   | Remark
+                ----------+----------+------------------------------
+                exists    | deleted  | In this case, we do a normal merge if there
+                          |          | are no tags. If there were changes in
+                          |          | Wiki A, there is a merge with a conflict.
+                          |          | Otherwise (no changes past last merge),
+                          |          | the page is deleted in Wiki A.
+                          |          | This needs static info that could be
+                          |          | transferred with the pagelist.
+                ----------+----------+-------------------------------
+                exists    | non-     | Now the wiki knows that the page was renamed.
+                with tags | existent | There should be an RPC method that asks
+                          |          | for the new name (which could be recorded
+                          |          | on page rename). Then the page is
+                          |          | renamed in Wiki A as well and the sync
+                          |          | is done normally.
+                          |          | Every wiki retains a dict that maps
+                          |          | (IWID, oldname) => newname and that is
+                          |          | updated on every rename. oldname refers
+                          |          | to the pagename known by the old wiki (can be
+                          |          | gathered from tags).
+                ----------+----------+-------------------------------
+                exists    | any case | Try a rename search first, then
+                with tags | with non-| do a sync without considering tags
+                          | matching | to ensure data integrity.
+                          | tags     |
+                          |          |
+                ----------+----------+-------------------------------
+        """
+        _ = self.request.getText
+        direction = params["direction"]
 
         l_pages = local.get_pages()
-        r_pages = remote.get_pages()
+        r_pages = remote.get_pages(exclude_non_writable=direction != DOWN)
 
         if params["groupList"]:
             pages_from_groupList = set(local.getGroupItems(params["groupList"]))
@@ -342,11 +176,11 @@
 
         m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)]
 
-        print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))
-        
+        print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages)) # XXX remove
+
         if params["pageMatch"]:
             m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
-        print "After filtering: Got %i merges pages" % (len(m_pages), )
+        print "After filtering: Got %i merges pages" % (len(m_pages), ) # XXX remove
 
         on_both_sides = list(SyncPage.iter_local_and_remote(m_pages))
         remote_but_not_local = list(SyncPage.iter_remote_only(m_pages))
@@ -356,21 +190,25 @@
         #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local])
         #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote])
         #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "<br>These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages))
-        #if params["direction"] in (DOWN, BOTH):
-        #    for rp in remote_but_not_local:
 
         # let's do the simple case first, can be refactored later to match all cases
         # XXX handle deleted pages
         for rp in on_both_sides:
             # XXX add locking, acquire read-lock on rp
+            print "Processing %r" % rp
 
-            current_page = Page(self.request, local_pagename)
+            local_pagename = rp.local_name
+            current_page = PageEditor(self.request, local_pagename) # YYY direct access
             current_rev = current_page.get_real_rev()
-            local_pagename = rp.local_pagename
 
             tags = TagStore(current_page)
-            matching_tags = tags.fetch(iwid_full=remote.iwid_full)
+            if direction == BOTH:
+                match_direction = direction
+            else:
+                match_direction = None
+            matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction)
             matching_tags.sort()
+            #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags)
 
             if not matching_tags:
                 remote_rev = None
@@ -380,38 +218,83 @@
                 newest_tag = matching_tags[-1]
                 local_rev = newest_tag.current_rev
                 remote_rev = newest_tag.remote_rev
-                if remote_rev == rp.remote_rev and local_rev == current_rev:
+                if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev):
                     continue # no changes done, next page
-                old_page = Page(self.request, local_pagename, rev=local_rev)
-                old_contents = old_page.get_raw_body_str()
+                old_contents = Page(self.request, local_pagename, rev=local_rev).get_raw_body_str() # YYY direct access
 
-            diff_result = remote.get_diff(rp.remote_pagename, remote_rev, None)
-            is_remote_conflict = diff_result["conflict"]
-            assert diff_result["diffversion"] == 1
-            diff = diff_result["diff"]
-            current_remote_rev = diff_result["current"]
+            self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name})
 
-            if remote_rev is None: # set the remote_rev for the case without any tags
-                remote_rev = current_remote_rev
+            if direction == DOWN:
+                remote_rev = None # always fetch the full page, ignore remote conflict check
+                patch_base_contents = ""
+            else:
+                patch_base_contents = old_contents
 
-            new_contents = patch(old_contents, decompress(diff)).decode("utf-8")
-            old_contents = old_contents.encode("utf-8")
+            if remote_rev != rp.remote_rev:
+                diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT
+                is_remote_conflict = diff_result["conflict"]
+                assert diff_result["diffversion"] == 1
+                diff = diff_result["diff"]
+                current_remote_rev = diff_result["current"]
+            else:
+                current_remote_rev = remote_rev
+                is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8"))
+                diff = None
+
+            # do not sync if the conflict is remote and local, or if it is local
+            # and the page has never been synchronised
+            if (wikiutil.containsConflictMarker(current_page.get_raw_body())
+                and (remote_rev is None or is_remote_conflict)):
+                self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename})
+                continue
+
+            if remote_rev is None and direction == BOTH:
+                self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki."))
+
+            if diff is None:
+                new_contents = old_contents.decode("utf-8")
+            else:
+                new_contents = patch(patch_base_contents, decompress(diff)).decode("utf-8")
 
             # here, the actual merge happens
-            verynewtext = diff3.text_merge(old_contents, new_contents, current_page.get_raw_body(), 1, *conflict_markers)
+            print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body())
+            verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents, current_page.get_raw_body(), 2, *conflict_markers)
 
-            new_local_rev = current_rev + 1 # XXX commit first?
             local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
             remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()])
-            # XXX add remote conflict handling
-            very_current_remote_rev = remote.merge_diff(rp.remote_pagename, compress(textdiff(new_contents, verynewtext)), new_local_rev, remote_rev, current_remote_rev, local_full_iwid)
-            tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev)
+
+            diff = textdiff(new_contents.encode("utf-8"), verynewtext.encode("utf-8"))
+            #print "Diff against %r" % new_contents.encode("utf-8")
+
             comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
+
+            # XXX upgrade to write lock
             try:
-                current_page.saveText(verynewtext, current_rev, comment=comment)
+                current_page.saveText(verynewtext, current_rev, comment=comment) # YYY direct access
+            except PageEditor.Unchanged:
+                pass
             except PageEditor.EditConflict:
                 assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"
-            # XXX untested
+
+            new_local_rev = current_page.get_real_rev()
+
+            if direction == BOTH:
+                try:
+                    very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, rp.name)
+                except Exception, e:
+                    raise # XXX rollback locally and do not tag locally
+            else:
+                very_current_remote_rev = current_remote_rev
+
+            tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=rp.name)
+
+            if not wikiutil.containsConflictMarker(verynewtext):
+                self.log_status(ActionClass.INFO, _("Page successfully merged."))
+            else:
+                self.log_status(ActionClass.WARN, _("Page merged with conflicts."))
+
+            # XXX release lock
+
 
 def execute(pagename, request):
     ActionClass(pagename, request).render()
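For orientation, here is a condensed sketch of the merge step the rewritten sync loop performs for a page that exists on both sides, using the names from the hunk above (locking, logging and error handling omitted):

    # three-way merge: common ancestor, remote text, local text
    merged = diff3.text_merge(old_contents.decode("utf-8"), new_contents,
                              current_page.get_raw_body(), 2, *conflict_markers)

    # save locally; for direction == BOTH, push a binary diff of the change
    # to the remote wiki, then record the new sync point as a tag
    current_page.saveText(merged, current_rev, comment=comment)
    new_local_rev = current_page.get_real_rev()
    very_current_remote_rev = remote.merge_diff(
        rp.remote_name,
        compress(textdiff(new_contents.encode("utf-8"), merged.encode("utf-8"))),
        new_local_rev, current_remote_rev, current_remote_rev,
        local_full_iwid, rp.name)
    tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev,
             current_rev=new_local_rev, direction=direction,
             normalised_name=rp.name)
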
--- a/MoinMoin/action/edit.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/action/edit.py	Tue Aug 15 11:29:45 2006 +0200
@@ -135,8 +135,6 @@
     # Save new text
     else:
         try:
-            still_conflict = wikiutil.containsConflictMarker(savetext)
-            pg.setConflict(still_conflict)
             savemsg = pg.saveText(savetext, rev, trivial=trivial, comment=comment)
         except pg.EditConflict, e:
             msg = e.message
--- a/MoinMoin/action/newpage.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/action/newpage.py	Tue Aug 15 11:29:45 2006 +0200
@@ -29,7 +29,7 @@
         @return: error message
         """
         _ = self.request.getText
-        need_replace = self.nametemplate.find('%s') != -1
+        need_replace = '%s' in self.nametemplate
         if not self.pagename and need_replace:
             return _("Cannot create a new page without a page name."
                      "  Please specify a page name.")
@@ -38,7 +38,7 @@
         # template variable
             repl = 'A@'
             i = 0
-            while self.nametemplate.find(repl) != -1:
+            while repl in self.nametemplate:
                 repl += ['#', '&', '$', 'x', 'X', ':', '@'][i]
                 i += 1
                 i = i % 7
--- a/MoinMoin/action/rss_rc.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/action/rss_rc.py	Tue Aug 15 11:29:45 2006 +0200
@@ -54,7 +54,7 @@
     for line in log.reverse():
         if not request.user.may.read(line.pagename):
             continue
-        if ((line.action[:4] != 'SAVE') or
+        if (not line.action.startswith('SAVE') or
             ((line.pagename in pages) and unique)): continue
         #if log.dayChanged() and log.daycount > _MAX_DAYS: break
         line.editor = line.getInterwikiEditorData(request)
--- a/MoinMoin/config/multiconfig.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/config/multiconfig.py	Tue Aug 15 11:29:45 2006 +0200
@@ -15,6 +15,7 @@
 from MoinMoin import error, util, wikiutil
 import MoinMoin.auth as authmodule
 from MoinMoin.packages import packLine
+from MoinMoin.security import AccessControlList
 
 _url_re_cache = None
 _farmconfig_mtime = None
@@ -557,6 +558,10 @@
         # Cache variables for the properties below
         self._iwid = self._iwid_full = self._meta_dict = None
 
+        self._acl_rights_before = AccessControlList(self, [self.acl_rights_before])
+        self._acl_rights_default = AccessControlList(self, [self.acl_rights_default])
+        self._acl_rights_after = AccessControlList(self, [self.acl_rights_after])
+
     def load_meta_dict(self):
         """ The meta_dict contains meta data about the wiki instance. """
         if getattr(self, "_meta_dict", None) is None:
--- a/MoinMoin/filter/EXIF.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/filter/EXIF.py	Tue Aug 15 11:29:45 2006 +0200
@@ -1004,7 +1004,7 @@
             return
 
         # Olympus
-        if make[:7] == 'OLYMPUS':
+        if make.startswith('OLYMPUS'):
             self.dump_IFD(note.field_offset+8, 'MakerNote',
                           dict=MAKERNOTE_OLYMPUS_TAGS)
             return
--- a/MoinMoin/formatter/__init__.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/formatter/__init__.py	Tue Aug 15 11:29:45 2006 +0200
@@ -7,12 +7,13 @@
     @copyright: 2000-2004 by Jürgen Hermann <jh@web.de>
     @license: GNU GPL, see COPYING for details.
 """
+import re
+
 from MoinMoin.util import pysupport
+from MoinMoin import wikiutil
 
 modules = pysupport.getPackageModules(__file__)
 
-from MoinMoin import wikiutil
-import re, types
 
 class FormatterBase:
     """ This defines the output interface used all over the rest of the code.
@@ -37,7 +38,7 @@
         self._base_depth = 0
 
     def set_highlight_re(self, hi_re=None):
-        if type(hi_re) in [types.StringType, types.UnicodeType]:
+        if isinstance(hi_re, (str, unicode)):
             try:
                 self._highlight_re = re.compile(hi_re, re.U + re.IGNORECASE)
             except re.error:
@@ -96,7 +97,7 @@
         """
         wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:"%s"' % (interwiki, pagename))
         if wikitag == 'Self' or wikitag == self.request.cfg.interwikiname:
-            if wikitail.find('#') > -1:
+            if '#' in wikitail:
                 wikitail, kw['anchor'] = wikitail.split('#', 1)
                 wikitail = wikiutil.url_unquote(wikitail)
             return self.pagelink(on, wikitail, **kw)
@@ -295,7 +296,7 @@
         return macro_obj.execute(name, args)
 
     def _get_bang_args(self, line):
-        if line[:2] == '#!':
+        if line.startswith('#!'):
             try:
                 name, args = line[2:].split(None, 1)
             except ValueError:
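Most of the remaining formatter hunks in this changeset are mechanical idiom updates rather than behaviour changes. A minimal, self-contained illustration of the substitutions (Python 2, which this code base targets); the values are throwaway:

    line = '#!python'
    # str.find()/slice comparisons become 'in' and startswith()
    assert ('#' in 'Page#anchor') == ('Page#anchor'.find('#') > -1)
    assert line.startswith('#!') == (line[:2] == '#!')
    # checks against the types module become isinstance() with builtins
    assert isinstance(u'x', (str, unicode))
    # apply() becomes a direct call with * / ** unpacking
    def show(*args, **kw):
        return args, kw
    assert show(1, 2, a=3) == apply(show, (1, 2), {'a': 3})
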
--- a/MoinMoin/formatter/dom_xml.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/formatter/dom_xml.py	Tue Aug 15 11:29:45 2006 +0200
@@ -191,7 +191,7 @@
         return self._set_tag('lang', on, value=lang_name)
 
     def pagelink(self, on, pagename='', page=None, **kw):
-        apply(FormatterBase.pagelink, (self, pagename, page), kw)
+        FormatterBase.pagelink(self, pagename, page, **kw)
         if not pagename and page is not None:
             pagename = page.page_name
         kw['pagename'] = pagename
--- a/MoinMoin/formatter/text_gedit.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/formatter/text_gedit.py	Tue Aug 15 11:29:45 2006 +0200
@@ -40,7 +40,7 @@
 
             See wikiutil.link_tag() for possible keyword parameters.
         """
-        apply(FormatterBase.pagelink, (self, on, pagename, page), kw)
+        FormatterBase.pagelink(self, on, pagename, page, **kw)
         if page is None:
             page = Page(self.request, pagename, formatter=self)
         return page.link_to(self.request, on=on, **kw)
--- a/MoinMoin/formatter/text_html.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/formatter/text_html.py	Tue Aug 15 11:29:45 2006 +0200
@@ -173,7 +173,7 @@
     indentspace = ' '
 
     def __init__(self, request, **kw):
-        apply(FormatterBase.__init__, (self, request), kw)
+        FormatterBase.__init__(self, request, **kw)
 
         # inline tags stack. When an inline tag is called, it goes into
         # the stack. When a block element starts, all inline tags in
@@ -484,7 +484,7 @@
 
             See wikiutil.link_tag() for possible keyword parameters.
         """
-        apply(FormatterBase.pagelink, (self, on, pagename, page), kw)
+        FormatterBase.pagelink(self, on, pagename, page, **kw)
         if page is None:
             page = Page(self.request, pagename, formatter=self)
         if self.request.user.show_nonexist_qm and on and not page.exists():
@@ -506,13 +506,13 @@
         wikiurl = wikiutil.mapURL(self.request, wikiurl)
         if wikitag == 'Self': # for own wiki, do simple links
             if on:
-                if wikitail.find('#') > -1:
+                if '#' in wikitail:
                     wikitail, kw['anchor'] = wikitail.split('#', 1)
                 wikitail = wikiutil.url_unquote(wikitail)
                 try: # XXX this is the only place where we access self.page - do we need it? Crashes silently on actions!
-                    return apply(self.pagelink, (on, wikiutil.AbsPageName(self.request, self.page.page_name, wikitail)), kw)
+                    return self.pagelink(on, wikiutil.AbsPageName(self.request, self.page.page_name, wikitail), **kw)
                 except:
-                    return apply(self.pagelink, (on, wikitail), kw)
+                    return self.pagelink(on, wikitail, **kw)
             else:
                 return self.pagelink(0)
         else: # return InterWiki hyperlink
--- a/MoinMoin/formatter/text_plain.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/formatter/text_plain.py	Tue Aug 15 11:29:45 2006 +0200
@@ -16,7 +16,7 @@
     hardspace = u' '
 
     def __init__(self, request, **kw):
-        apply(FormatterBase.__init__, (self, request), kw)
+        FormatterBase.__init__(self, request, **kw)
         self._in_code_area = 0
         self._in_code_line = 0
         self._code_area_state = [0, -1, -1, 0]
@@ -36,7 +36,7 @@
         return (u'\n\n*** ', u' ***\n\n')[not on]
 
     def pagelink(self, on, pagename='', page=None, **kw):
-        apply(FormatterBase.pagelink, (self, on, pagename, page), kw)
+        FormatterBase.pagelink(self, on, pagename, page, **kw)
         return (u">>", u"<<") [not on]
 
     def interwikilink(self, on, interwiki='', pagename='', **kw):
--- a/MoinMoin/formatter/text_xml.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/formatter/text_xml.py	Tue Aug 15 11:29:45 2006 +0200
@@ -19,7 +19,7 @@
     hardspace = '&nbsp;'
 
     def __init__(self, request, **kw):
-        apply(FormatterBase.__init__, (self, request), kw)
+        FormatterBase.__init__(self, request, **kw)
         self._current_depth = 1
         self._base_depth = 0
         self.in_pre = 0
@@ -49,7 +49,7 @@
         return '<![CDATA[' + markup.replace(']]>', ']]>]]&gt;<![CDATA[') + ']]>'
 
     def pagelink(self, on, pagename='', page=None, **kw):
-        apply(FormatterBase.pagelink, (self, on, pagename, page), kw)
+        FormatterBase.pagelink(self, on, pagename, page, **kw)
         if page is None:
             page = Page(self.request, pagename, formatter=self)
         return page.link_to(self.request, on=on, **kw)
@@ -202,7 +202,7 @@
         for key, value in kw.items():
             if key in valid_attrs:
                 attrs[key] = value
-        return apply(FormatterBase.image, (self,), attrs) + '</img>'
+        return FormatterBase.image(self, **attrs) + '</img>'
 
     def code_area(self, on, code_id, code_type='code', show=0, start=-1, step=-1):
         return ('<codearea id="%s">' % code_id, '</codearea')[not on]
--- a/MoinMoin/macro/NewPage.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/macro/NewPage.py	Tue Aug 15 11:29:45 2006 +0200
@@ -79,7 +79,7 @@
         if parent == '@ME' and self.request.user.valid:
             parent = self.request.user.name
 
-        requires_input = nametemplate.find('%s') != -1
+        requires_input = '%s' in nametemplate
 
         if label:
             # Try to get a translation, this will probably not work in
--- a/MoinMoin/macro/RecentChanges.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/macro/RecentChanges.py	Tue Aug 15 11:29:45 2006 +0200
@@ -38,7 +38,7 @@
         elif line.action == 'ATTDRW':
             comment = _("Drawing '%(filename)s' saved.") % {
                 'filename': filename}
-    elif line.action.find('/REVERT') != -1:
+    elif '/REVERT' in line.action:
         rev = int(line.extra)
         comment = _("Revert to revision %(rev)d.") % {'rev': rev}
 
--- a/MoinMoin/macro/TableOfContents.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/macro/TableOfContents.py	Tue Aug 15 11:29:45 2006 +0200
@@ -67,7 +67,7 @@
         if self.include_macro is None:
             self.include_macro = wikiutil.importPlugin(self.macro.request.cfg,
                                                        'macro', "Include")
-        return self.pre_re.sub('', apply(self.include_macro, args, kwargs)).split('\n')
+        return self.pre_re.sub('', self.include_macro(*args, **kwargs)).split('\n')
 
     def run(self):
         _ = self._
--- a/MoinMoin/macro/__init__.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/macro/__init__.py	Tue Aug 15 11:29:45 2006 +0200
@@ -295,7 +295,7 @@
         for tag, url in list:
             buf.write('<dt><tt><a href="%s">%s</a></tt></dt>' % (
                 wikiutil.join_wiki(url, 'RecentChanges'), tag))
-            if url.find('$PAGE') == -1:
+            if '$PAGE' not in url:
                 buf.write('<dd><tt><a href="%s">%s</a></tt></dd>' % (url, url))
             else:
                 buf.write('<dd><tt>%s</tt></dd>' % url)
@@ -415,7 +415,7 @@
         from MoinMoin.mail.sendmail import decodeSpamSafeEmail
 
         args = args or ''
-        if args.find(',') == -1:
+        if ',' not in args:
             email = args
             text = ''
         else:
--- a/MoinMoin/parser/text_moin_wiki.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/parser/text_moin_wiki.py	Tue Aug 15 11:29:45 2006 +0200
@@ -758,7 +758,7 @@
     
     def _parser_repl(self, word):
         """Handle parsed code displays."""
-        if word[:3] == '{{{':
+        if word.startswith('{{{'):
             word = word[3:]
 
         self.parser = None
@@ -770,7 +770,7 @@
             word = ''
             self.in_pre = 3
             return self._closeP() + self.formatter.preformatted(1)
-        elif s_word[:2] == '#!':
+        elif s_word.startswith('#!'):
             # First try to find a parser for this (will go away in 2.0)
             parser_name = s_word[2:].split()[0]
             self.setParser(parser_name)
@@ -972,7 +972,7 @@
                 if self.in_pre == 1:
                     self.parser = None
                     parser_name = ''
-                    if (line.strip()[:2] == "#!"):
+                    if line.strip().startswith("#!"):
                         parser_name = line.strip()[2:].split()[0]
                         self.setParser(parser_name)
 
@@ -1054,7 +1054,7 @@
                 # Table mode
                 # TODO: move into function?                
                 if (not self.in_table and line[indlen:indlen + 2] == "||"
-                    and line[-3:] == "|| " and len(line) >= 5 + indlen):
+                    and line.endswith("|| ") and len(line) >= 5 + indlen):
                     # Start table
                     if self.list_types and not self.in_li:
                         self.request.write(self.formatter.listitem(1, style="list-style-type:none"))
@@ -1071,9 +1071,9 @@
                     self.in_table = True # self.lineno
                 elif (self.in_table and not
                       # intra-table comments should not break a table
-                      (line[:2] == "##" or  
+                      (line.startswith("##") or
                        line[indlen:indlen + 2] == "||" and
-                       line[-3:] == "|| " and
+                       line.endswith("|| ") and
                        len(line) >= 5 + indlen)):
                     
                     # Close table
--- a/MoinMoin/parser/text_rst.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/parser/text_rst.py	Tue Aug 15 11:29:45 2006 +0200
@@ -15,7 +15,6 @@
 import __builtin__
 import sys
 
-import types
 import os
 
 # docutils imports are below
@@ -227,7 +226,7 @@
     def append(self, text):
         f = sys._getframe()
         if f.f_back.f_code.co_filename.endswith('html4css1.py'):
-            if isinstance(text, types.StringType) or isinstance(text, types.UnicodeType):
+            if isinstance(text, (str, unicode)):
                 text = self.formatter.rawHTML(text)
         list.append(self, text)
 
@@ -256,7 +255,7 @@
         # Make all internal lists RawHTMLLists, see RawHTMLList class
         # comment for more information.
         for i in self.__dict__:
-            if isinstance(getattr(self, i), types.ListType):
+            if isinstance(getattr(self, i), list):
                 setattr(self, i, RawHTMLList(formatter))
 
     def depart_docinfo(self, node):
@@ -383,7 +382,7 @@
                 # Default case - make a link to a wiki page.
                 pagename = refuri
                 anchor = ''
-                if refuri.find('#') != -1:
+                if '#' in refuri:
                     pagename, anchor = refuri.split('#', 1)
                     anchor = '#' + anchor
                 page = Page(self.request, pagename)
--- a/MoinMoin/request/__init__.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/request/__init__.py	Tue Aug 15 11:29:45 2006 +0200
@@ -220,13 +220,16 @@
 
     def surge_protect(self):
         """ check if someone requesting too much from us """
+        limits = self.cfg.surge_action_limits
+        if not limits:
+            return False
+
         validuser = self.user.valid
         current_id = validuser and self.user.name or self.remote_addr
         if not validuser and current_id.startswith('127.'): # localnet
             return False
         current_action = self.action
 
-        limits = self.cfg.surge_action_limits
         default_limit = self.cfg.surge_action_limits.get('default', (30, 60))
 
         now = int(time.time())
@@ -327,7 +330,7 @@
             accept_charset = accept_charset.lower()
             # Add iso-8859-1 if needed
             if (not '*' in accept_charset and
-                accept_charset.find('iso-8859-1') < 0):
+                'iso-8859-1' not in accept_charset):
                 accept_charset += ',iso-8859-1'
 
             # Make a list, sorted by quality value, using Schwartzian Transform
@@ -433,7 +436,7 @@
         """
         # Fix the script_name when using Apache on Windows.
         server_software = env.get('SERVER_SOFTWARE', '')
-        if os.name == 'nt' and server_software.find('Apache/') != -1:
+        if os.name == 'nt' and 'Apache/' in server_software:
             # Removes elements ending in '.' from the path.
             self.script_name = '/'.join([x for x in self.script_name.split('/')
                                          if not x.endswith('.')])
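With the early return added above, surge protection can now be disabled outright by giving the wiki an empty (or otherwise false) limits dict. A minimal wikiconfig sketch, assuming the usual DefaultConfig base class (not part of this changeset); the sitename is illustrative:

    from MoinMoin.config.multiconfig import DefaultConfig

    class Config(DefaultConfig):
        sitename = u'My Wiki'
        # falsy value: surge_protect() returns False immediately
        surge_action_limits = {}
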
--- a/MoinMoin/script/account/check.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/script/account/check.py	Tue Aug 15 11:29:45 2006 +0200
@@ -189,12 +189,11 @@
                 self.process(uids)
 
     def make_WikiNames(self):
-        import string
         for uid, u in self.users.items():
             if u.disabled:
                 continue
             if not wikiutil.isStrictWikiname(u.name):
-                newname = string.capwords(u.name).replace(" ", "").replace("-", "")
+                newname = ''.join([w.capitalize() for w in u.name.split()]).replace("-", "")
                 if not wikiutil.isStrictWikiname(newname):
                     print " %-20s %-30r - no WikiName, giving up" % (uid, u.name)
                 else:
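Since the string-module import is dropped here, the capitalisation has to use plain str/unicode methods (str objects have no capwords()). A quick, illustrative check that the replacement used above matches the old string.capwords() behaviour:

    import string
    name = u"joe  doe-smith"
    old = string.capwords(name).replace(" ", "").replace("-", "")
    new = ''.join([w.capitalize() for w in name.split()]).replace("-", "")
    assert old == new == u"JoeDoesmith"
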
--- a/MoinMoin/search/queryparser.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/search/queryparser.py	Tue Aug 15 11:29:45 2006 +0200
@@ -10,7 +10,7 @@
     @license: GNU GPL, see COPYING for details
 """
 
-import re, string
+import re
 from MoinMoin import config
 from MoinMoin.search.results import Match, TitleMatch, TextMatch
 
--- a/MoinMoin/security/__init__.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/security/__init__.py	Tue Aug 15 11:29:45 2006 +0200
@@ -169,38 +169,23 @@
 
     special_users = ["All", "Known", "Trusted"] # order is important
 
-    def __init__(self, request, lines=[]):
+    def __init__(self, cfg, lines=[]):
         """Initialize an ACL, starting from <nothing>.
         """
-        self.setLines(request.cfg, lines)
-
-    def setLines(self, cfg, lines=[]):
-        self.clean()
-        self.addBefore(cfg)
-        if not lines:
-            self.addDefault(cfg)
+        self._is_group = {}
+        if lines:
+            self.acl = [] # [ ('User', {"read": 0, ...}), ... ]
+            self.acl_lines = []
+            for line in lines:
+                self._addLine(cfg, line)
         else:
-            for line in lines:
-                self.addLine(cfg, line)
-        self.addAfter(cfg)
+            self.acl = None
+            self.acl_lines = None
 
-    def clean(self):
-        self.acl = [] # [ ('User', {"read": 0, ...}), ... ]
-        self.acl_lines = []
-        self._is_group = {}
-
-    def addBefore(self, cfg):
-        self.addLine(cfg, cfg.acl_rights_before, remember=0)
-    def addDefault(self, cfg):
-        self.addLine(cfg, cfg.acl_rights_default, remember=0)
-    def addAfter(self, cfg):
-        self.addLine(cfg, cfg.acl_rights_after, remember=0)
-
-    def addLine(self, cfg, aclstring, remember=1):
+    def _addLine(self, cfg, aclstring, remember=1):
         """ Add another ACL line
 
-        This can be used in multiple subsequent calls to process longer
-        lists.
+        This can be used in multiple subsequent calls to process longer lists.
 
         @param cfg: current config
         @param aclstring: acl string from page or cfg
@@ -216,26 +201,25 @@
         acliter = ACLStringIterator(cfg.acl_rights_valid, aclstring)
         for modifier, entries, rights in acliter:
             if entries == ['Default']:
-                self.addDefault(cfg)
-                continue
-
-            for entry in entries:
-                if group_re.search(entry):
-                    self._is_group[entry] = 1
-                rightsdict = {}
-                if modifier:
-                    # Only user rights are added to the right dict.
-                    # + add rights with value of 1
-                    # - add right with value of 0
-                    for right in rights:
-                        rightsdict[right] = (modifier == '+')
-                else:
-                    # All rights from acl_rights_valid are added to the
-                    # dict, user rights with value of 1, and other with
-                    # value of 0
-                    for right in cfg.acl_rights_valid:
-                        rightsdict[right] = (right in rights)
-                self.acl.append((entry, rightsdict))
+                self._addLine(cfg, cfg.acl_rights_default, remember=0)
+            else:
+                for entry in entries:
+                    if group_re.search(entry):
+                        self._is_group[entry] = 1
+                    rightsdict = {}
+                    if modifier:
+                        # Only user rights are added to the right dict.
+                        # + add rights with value of 1
+                        # - add right with value of 0
+                        for right in rights:
+                            rightsdict[right] = (modifier == '+')
+                    else:
+                        # All rights from acl_rights_valid are added to the
+                        # dict, user rights with value of 1, and other with
+                        # value of 0
+                        for right in cfg.acl_rights_valid:
+                            rightsdict[right] = (right in rights)
+                    self.acl.append((entry, rightsdict))
 
     def may(self, request, name, dowhat):
         """May <name> <dowhat>?
@@ -243,8 +227,10 @@
         """
         is_group_member = request.dicts.has_member
 
+        acl_page = self.acl or request.cfg._acl_rights_default.acl
+        acl = request.cfg._acl_rights_before.acl + acl_page + request.cfg._acl_rights_after.acl
         allowed = None
-        for entry, rightsdict in self.acl:
+        for entry, rightsdict in acl:
             if entry in self.special_users:
                 handler = getattr(self, "_special_"+entry, None)
                 allowed = handler(request, name, dowhat, rightsdict)
@@ -266,7 +252,11 @@
 
     def getString(self, b='#acl ', e='\n'):
         """print the acl strings we were fed with"""
-        return ''.join(["%s%s%s" % (b, l, e) for l in self.acl_lines])
+        if self.acl_lines:
+            acl_lines = ''.join(["%s%s%s" % (b, l, e) for l in self.acl_lines])
+        else:
+            acl_lines = ''
+        return acl_lines
 
     def _special_All(self, request, name, dowhat, rightsdict):
         return rightsdict.get(dowhat)
@@ -291,6 +281,7 @@
 
     def __eq__(self, other):
         return self.acl_lines == other.acl_lines
+
     def __ne__(self, other):
         return self.acl_lines != other.acl_lines
 
@@ -403,5 +394,5 @@
             else:
                 args = ""
             acl_lines.append(args)
-    return AccessControlList(request, acl_lines)
+    return AccessControlList(request.cfg, acl_lines)
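With this change an AccessControlList is constructed from a config object instead of a request, and the acl_rights_before/default/after parts are precomputed once on the config (see the multiconfig.py hunk above) and only combined inside may(). A small sketch of the new usage; the ACL string and the right being checked are illustrative:

    acl = AccessControlList(request.cfg, ["JoeDoe:read,write  All:read"])

    # may() checks cfg._acl_rights_before.acl, then this page ACL (or
    # cfg._acl_rights_default.acl if the page defines none), then
    # cfg._acl_rights_after.acl
    allowed = acl.may(request, request.user.name, "write")
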
 
--- a/MoinMoin/support/BasicAuthTransport.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/support/BasicAuthTransport.py	Tue Aug 15 11:29:45 2006 +0200
@@ -1,13 +1,13 @@
 # taken from Amos' XML-RPC HowTo:
 
-import string, xmlrpclib, httplib
+import xmlrpclib, httplib
 from base64 import encodestring
 
 class BasicAuthTransport(xmlrpclib.Transport):
     def __init__(self, username=None, password=None):
-        self.username=username
-        self.password=password
-        self.verbose=0
+        self.username = username
+        self.password = password
+        self.verbose = 0
 
     def request(self, host, handler, request_body, **kw):
         # issue XML-RPC request
@@ -21,13 +21,10 @@
         h.putheader("User-Agent", self.user_agent)
         h.putheader("Content-Type", "text/xml")
         h.putheader("Content-Length", str(len(request_body)))
-        #h.putheader("Connection", "close") # TW XXX just trying if that cures twisted ...
 
         # basic auth
         if self.username is not None and self.password is not None:
-            authhdr = "Basic %s" % string.replace(
-                    encodestring("%s:%s" % (self.username, self.password)),
-                    "\012", "")
+            authhdr = "Basic %s" % encodestring("%s:%s" % (self.username, self.password)).replace("\012", "")
             h.putheader("Authorization", authhdr)
         h.endheaders()
 
@@ -43,5 +40,5 @@
                 headers
                 )
 
-        return self.parse_response(h.getfile()) 
+        return self.parse_response(h.getfile())
 
--- a/MoinMoin/support/__init__.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/support/__init__.py	Tue Aug 15 11:29:45 2006 +0200
@@ -4,13 +4,8 @@
 
     This package collects small third party utilities in order
     to reduce the necessary steps in installing MoinMoin. Each
-    source file is copyrighted by its respective author. I've done
-    my best to assure those files are freely redistributable.
-
-    Further details on the modules:
-
-    cgitb
-        from python 2.2 + patches (see XXX)
+    source file is copyrighted by its respective author. We've done
+    our best to assure those files are freely redistributable.
 
     @copyright: 2001-2004 by Jürgen Hermann <jh@web.de>
     @license: GNU GPL, see COPYING for details.
--- a/MoinMoin/support/cgitb.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/support/cgitb.py	Tue Aug 15 11:29:45 2006 +0200
@@ -16,7 +16,7 @@
 
 By default, tracebacks are displayed but not saved, the context is 5 lines
 and the output format is 'html' (for backwards compatibility with the
-original use of this module)
+original use of this module).
 
 Alternatively, if you have caught an exception and want cgitb to display it
 for you, call cgitb.handler().  The optional argument to handler() is a
@@ -30,19 +30,19 @@
  - Refactor html and text functions to View class, HTMLFormatter and
    TextFormatter. No more duplicate formating code.
  - Layout is done with minimal html and css, in a way it can't be
-   effected by souranding code.
- - Built to be easy to subclass and modify without duplicating code
+   affected by surrounding code.
+ - Built to be easy to subclass and modify without duplicating code.
  - Change layout, important details come first.
- - Factor frame analaizing and formatting into separate class
+ - Factor frame analyzing and formatting into separate class.
  - Add debug argument, can be used to change error display e.g. user
-   error view, developer error view
- - Add viewClass argument, make it easy to customize the traceback view
- - Easy to customize system details and application details
+   error view, developer error view.
+ - Add viewClass argument, make it easy to customize the traceback view.
+ - Easy to customize system details and application details.
 
 The main goal of this rewrite was to have a traceback that can render
-few tracebacks combined. Its needed when you wrap an expection and want
+a few tracebacks combined. It's needed when you wrap an exception and want
 to print both the traceback up to the wrapper exception, and the
-original traceback. There is no code to support this here, but its easy
+original traceback. There is no code to support this here, but it's easy
 to add by using your own View sub class.
 """
 
@@ -58,14 +58,14 @@
     Return a string that resets the CGI and browser to a known state.
     TODO: probably some of this is not needed any more.
     """
-    return '''<!--: spam
+    return """<!--: spam
 Content-Type: text/html
 
 <body><font style="color: white; font-size: 1px"> -->
 <body><font style="color: white; font-size: 1px"> --> -->
 </font> </font> </font> </script> </object> </blockquote> </pre>
 </table> </table> </table> </table> </table> </font> </font> </font>
-'''
+"""
 
 __UNDEF__ = [] # a special sentinel object
 
@@ -77,17 +77,16 @@
 
 class HTMLFormatter:
     """ Minimal html formatter """
-    
+
     def attributes(self, attributes=None):
         if attributes:
-            result = [' %s="%s"' % (k, v) for k, v in attributes.items()]           
+            result = [' %s="%s"' % (k, v) for k, v in attributes.items()]
             return ''.join(result)
         return ''
-    
+
     def tag(self, name, text, attributes=None):
-        return '<%s%s>%s</%s>\n' % (name, self.attributes(attributes), 
-                                    text, name)
-    
+        return '<%s%s>%s</%s>\n' % (name, self.attributes(attributes), text, name)
+
     def section(self, text, attributes=None):
         return self.tag('div', text, attributes)
 
@@ -114,9 +113,9 @@
         if isinstance(items, (list, tuple)):
             items = '\n' + ''.join([self.listItem(i) for i in items])
         return self.tag(name, items, attributes)
-    
+
     def listItem(self, text, attributes=None):
-        return self.tag('li', text, attributes)        
+        return self.tag('li', text, attributes)
 
     def link(self, href, text, attributes=None):
         if attributes is None:
@@ -125,14 +124,14 @@
         return self.tag('a', text, attributes)
 
     def strong(self, text, attributes=None):
-        return self.tag('strong', text, attributes)        
+        return self.tag('strong', text, attributes)
 
     def em(self, text, attributes=None):
-        return self.tag('em', text, attributes)        
+        return self.tag('em', text, attributes)
 
     def repr(self, object):
         return pydoc.html.repr(object)
-        
+
 
 class TextFormatter:
     """ Plain text formatter """
@@ -170,20 +169,20 @@
         return items
 
     def listItem(self, text, attributes=None):
-        return ' * %s\n' % text       
+        return ' * %s\n' % text
 
     def link(self, href, text, attributes=None):
         return '[[%s]]' % text
 
     def strong(self, text, attributes=None):
         return text
-   
+
     def em(self, text, attributes=None):
         return text
-   
+
     def repr(self, object):
         return repr(object)
-        
+
 
 class Frame:
     """ Analyze and format single frame in a traceback """
@@ -207,19 +206,19 @@
 
     # -----------------------------------------------------------------
     # Private - formatting
-        
+
     def formatCall(self):
         call = '%s in %s%s' % (self.formatFile(),
                                self.formatter.strong(self.func),
                                self.formatArguments(),)
         return self.formatter.paragraph(call, {'class': 'call'})
-    
+
     def formatFile(self):
         """ Return formatted file link """
         if not self.file:
             return '?'
         file = pydoc.html.escape(os.path.abspath(self.file))
-        return self.formatter.link('file://' + file, file)        
+        return self.formatter.link('file://' + file, file)
 
     def formatArguments(self):
         """ Return formated arguments list """
@@ -250,11 +249,11 @@
         return self.formatter.orderedList(context, {'class': 'context'})
 
     def formatVariables(self, vars):
-        """ Return formatted variables """ 
+        """ Return formatted variables """
         done = {}
         dump = []
         for name, where, value in vars:
-            if name in done: 
+            if name in done:
                 continue
             done[name] = 1
             if value is __UNDEF__:
@@ -280,12 +279,12 @@
     def scan(self):
         """ Scan frame for vars while setting highlight line """
         highlight = {}
-        
+
         def reader(lnum=[self.lnum]):
             highlight[lnum[0]] = 1
-            try: 
+            try:
                 return linecache.getline(self.file, lnum[0])
-            finally: 
+            finally:
                 lnum[0] += 1
 
         vars = self.scanVariables(reader)
@@ -295,7 +294,7 @@
         """ Lookup variables in one logical Python line """
         vars, lasttoken, parent, prefix, value = [], None, None, '', __UNDEF__
         for ttype, token, start, end, line in tokenize.generate_tokens(reader):
-            if ttype == tokenize.NEWLINE: 
+            if ttype == tokenize.NEWLINE:
                 break
             if ttype == tokenize.NAME and token not in keyword.kwlist:
                 if lasttoken == '.':
@@ -341,14 +340,14 @@
 
 class View:
     """ Traceback view """
-    
+
     frameClass = Frame # analyze and format a frame
-    
+
     def __init__(self, info=None, debug=0):
         """ Save starting info or current exception info """
         self.info = info or sys.exc_info()
         self.debug = debug
-        
+
     def format(self, formatter, context=5):
         self.formatter = formatter
         self.context = context
@@ -411,20 +410,20 @@
 
     # -----------------------------------------------------------------
     # Head
-    
+
     def formatTitle(self):
         return self.formatter.title(self.exceptionTitle(self.info))
-        
+
     def formatMessage(self):
         return self.formatter.paragraph(self.exceptionMessage(self.info))
-        
+
     # -----------------------------------------------------------------
     # Traceback
 
     def formatTraceback(self):
         """ Return formatted traceback """
         return self.formatOneTraceback(self.info)
-    
+
     def formatOneTraceback(self, info):
         """ Format one traceback
         
@@ -435,7 +434,7 @@
                   self.formatter.orderedList(self.tracebackFrames(info),
                                             {'class': 'frames'}),
                   self.formatter.section(self.formatException(info),
-                                         {'class': 'exception'}),]
+                                         {'class': 'exception'}), ]
         return self.formatter.section(''.join(output), {'class': 'traceback'})
 
     def tracebackFrames(self, info):
@@ -458,12 +457,12 @@
     def formatException(self, info):
         items = [self.formatExceptionTitle(info),
                  self.formatExceptionMessage(info),
-                 self.formatExceptionAttributes(info),]
+                 self.formatExceptionAttributes(info), ]
         return ''.join(items)
 
     def formatExceptionTitle(self, info):
         return self.formatter.subSubTitle(self.exceptionTitle(info))
-        
+
     def formatExceptionMessage(self, info):
         return self.formatter.paragraph(self.exceptionMessage(info))
 
@@ -471,7 +470,7 @@
         attribtues = []
         for name, value in self.exceptionAttributes(info):
             value = self.formatter.repr(value)
-            attribtues.append('%s = %s' % (name, value))           
+            attribtues.append('%s = %s' % (name, value))
         return self.formatter.list(attribtues)
 
     def exceptionAttributes(self, info):
@@ -488,7 +487,7 @@
     def exceptionTitle(self, info):
         type = info[0]
         return getattr(type, '__name__', str(type))
-        
+
     def exceptionMessage(self, info):
         instance = info[1]
         return pydoc.html.escape(str(instance))
@@ -500,15 +499,14 @@
     def formatSystemDetails(self):
         details = ['Date: %s' % self.date(),
                    'Platform: %s' % self.platform(),
-                   'Python: %s' % self.python(),]
+                   'Python: %s' % self.python(), ]
         details += self.applicationDetails()
         return (self.formatter.subTitle('System Details') +
                 self.formatter.list(details, {'class': 'system'}))
 
     def date(self):
         import time
-        rfc2822Date = time.strftime("%a, %d %b %Y %H:%M:%S +0000",
-                                    time.gmtime())
+        rfc2822Date = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())
         return rfc2822Date
 
     def platform(self):
@@ -535,7 +533,7 @@
         """ Separate to enable formatting multiple tracebacks. """
         import traceback
         return ''.join(traceback.format_exception(*info))
-    
+
     def textTracebackTemplate(self):
         return '''
     
@@ -593,7 +591,7 @@
 
         if self.logdir is not None:
             import os, tempfile
-            suffix = ['.txt', '.html'][self.format=="html"]
+            suffix = ['.txt', '.html'][self.format == "html"]
             (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir)
             try:
                 file = os.fdopen(fd, 'w')
@@ -610,8 +608,7 @@
 
 handler = Hook().handle
 
-def enable(display=1, logdir=None, context=5, format="html",
-           viewClass=View, debug=0):
+def enable(display=1, logdir=None, context=5, format="html", viewClass=View, debug=0):
     """Install an exception handler that formats tracebacks as HTML.
 
     The optional argument 'display' can be set to 0 to suppress sending the
@@ -619,3 +616,4 @@
     tracebacks to be written to files there."""
     sys.excepthook = Hook(display=display, logdir=logdir, context=context,
                           format=format, viewClass=viewClass, debug=debug)
+
--- a/MoinMoin/support/thfcgi.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/support/thfcgi.py	Tue Aug 15 11:29:45 2006 +0200
@@ -1,6 +1,5 @@
 # -*- coding: iso-8859-1 -*-
 """
-    
     thfcgi.py - FastCGI communication with thread support
 
     Copyright Peter Åstrand <astrand@lysator.liu.se> 2001
@@ -10,6 +9,10 @@
     Added "external application" support, refactored code
         by Alexander Schremmer <alex AT alexanderweb DOT de>
 
+    Cleanup, fixed typos, PEP-8, support for limiting thread creation,
+    limiting process lifetime to a number of requests, configurable backlog
+    for socket.listen() by Thomas Waldmann <tw AT waldmann-edv DOT de>
+
     For code base see:
     http://cvs.lysator.liu.se/viewcvs/viewcvs.cgi/webkom/thfcgi.py?cvsroot=webkom
 
@@ -33,21 +36,27 @@
 # CONTENT_LENGTH and abort the update if the two numbers are not equal.
 #
 
-# Imports
+debug = False
+
 import os
 import sys
 import select
-import string
 import socket
 import errno
 import cgi
 from cStringIO import StringIO
 import struct
 
+try:
+    import threading as _threading
+except ImportError:
+    import dummy_threading as _threading
+
 # Maximum number of requests that can be handled
 FCGI_MAX_REQS = 50
 FCGI_MAX_CONNS = 50
 FCGI_VERSION_1 = 1
+
 # Can this application multiplex connections?
 FCGI_MPXS_CONNS = 0
 
@@ -90,8 +99,15 @@
 FCGI_UnknownTypeBody = "!B7x"
 FCGI_EndRequestBody = "!IB3x"
 
+LOGFILE = sys.stderr
+
+def log(s):
+    if debug:
+        LOGFILE.write(s)
+        LOGFILE.write('\n')
+
 class SocketErrorOnWrite:
-    """ Is raised if a write fails in the socket code."""
+    """Is raised if a write fails in the socket code."""
     pass
 
 class Record:
@@ -139,7 +155,7 @@
         value = data[pos:pos+valuelen]
         pos += valuelen
 
-        return (name, value, pos)
+        return name, value, pos
 
     def write_pair(self, name, value):
         """Write a FastCGI key-value pair to the server."""
@@ -158,29 +174,27 @@
             data += struct.pack("!I", value | 0x80000000L)
 
         return data + name + value
-        
+
     def readRecord(self, sock):
         """Read a FastCGI record from the server."""
         data = sock.recv(8)
         if not data:
-            # No data recieved. This means EOF. 
+            # No data received. This means EOF. 
             return None
-        
-        fields = struct.unpack(FCGI_Record_header, data)
-        (self.version, self.rec_type, self.req_id,
-         contentLength, paddingLength) = fields
-        
+
+        self.version, self.rec_type, self.req_id, contentLength, paddingLength = \
+            struct.unpack(FCGI_Record_header, data)
+
         self.content = ""
         while len(self.content) < contentLength:
             data = sock.recv(contentLength - len(self.content))
             self.content = self.content + data
         if paddingLength != 0:
             sock.recv(paddingLength)
-        
+
         # Parse the content information
         if self.rec_type == FCGI_BEGIN_REQUEST:
-            (self.role, self.flags) = struct.unpack(FCGI_BeginRequestBody,
-                                                    self.content)
+            self.role, self.flags = struct.unpack(FCGI_BeginRequestBody, self.content)
             self.keep_conn = self.flags & FCGI_KEEP_CONN
 
         elif self.rec_type == FCGI_UNKNOWN_TYPE:
@@ -192,10 +206,9 @@
             while pos < len(self.content):
                 name, value, pos = self.read_pair(self.content, pos)
                 self.values[name] = value
+
         elif self.rec_type == FCGI_END_REQUEST:
-            (self.appStatus,
-             self.protocolStatus) = struct.unpack(FCGI_EndRequestBody,
-                                                  self.content)
+            self.appStatus, self.protocolStatus = struct.unpack(FCGI_EndRequestBody, self.content)
 
         return 1
 
@@ -214,16 +227,14 @@
                 content = content + self.write_pair(i, self.values[i])
 
         elif self.rec_type == FCGI_END_REQUEST:
-            content = struct.pack(FCGI_EndRequestBody, self.appStatus,
-                                  self.protocolStatus)
+            content = struct.pack(FCGI_EndRequestBody, self.appStatus, self.protocolStatus)
 
         # Align to 8-byte boundary
         clen = len(content)
         padlen = ((clen + 7) & 0xfff8) - clen
-        
-        hdr = struct.pack(FCGI_Record_header, self.version, self.rec_type,
-                          self.req_id, clen, padlen)
-        
+
+        hdr = struct.pack(FCGI_Record_header, self.version, self.rec_type, self.req_id, clen, padlen)
+
         try:
             sock.sendall(hdr + content + padlen*"\x00")
         except socket.error:
@@ -234,13 +245,13 @@
 class Request:
     """A request, corresponding to an accept():ed connection and
     a FCGI request."""
-    
-    def __init__(self, conn, req_handler, multi=1):
+
+    def __init__(self, conn, req_handler, inthread=False):
         """Initialize Request container."""
         self.conn = conn
         self.req_handler = req_handler
-        self.multi = multi
-        
+        self.inthread = inthread
+
         self.keep_conn = 0
         self.req_id = None
 
@@ -276,14 +287,13 @@
             else:
                 # EOF, connection closed. Break loop, end thread. 
                 return
-                
+
     def getFieldStorage(self):
         """Return a cgi FieldStorage constructed from the stdin and
         environ read from the server for this request."""
         self.stdin.reset()
         # cgi.FieldStorage will eat the input here...
-        r = cgi.FieldStorage(fp=self.stdin, environ=self.env,
-                             keep_blank_values=1)
+        r = cgi.FieldStorage(fp=self.stdin, environ=self.env, keep_blank_values=1)
         # hence, we reset here so we can obtain
         # the data again...
         self.stdin.reset()
@@ -301,7 +311,7 @@
         if not data:
             # Writing zero bytes would mean stream termination
             return
-        
+
         while data:
             chunk, data = self.getNextChunk(data)
             rec.content = chunk
@@ -362,10 +372,9 @@
         rec.writeRecord(self.conn)
         if not self.keep_conn:
             self.conn.close()
-            if self.multi:
-                import thread
-                thread.exit()
-    
+            if self.inthread:
+                raise SystemExit
+
     #
     # Record handlers
     #
@@ -384,8 +393,7 @@
         if rec_type in KNOWN_MANAGEMENT_TYPES:
             self._handle_known_man_types(rec)
         else:
-            # It's a management record of an unknown
-            # type. Signal the error.
+            # It's a management record of an unknown type. Signal the error.
             rec = Record()
             rec.rec_type = FCGI_UNKNOWN_TYPE
             rec.unknownType = rec_type
@@ -397,9 +405,10 @@
             reply_rec = Record()
             reply_rec.rec_type = FCGI_GET_VALUES_RESULT
 
-            params = {'FCGI_MAX_CONNS' : FCGI_MAX_CONNS,
-                      'FCGI_MAX_REQS' : FCGI_MAX_REQS,
-                      'FCGI_MPXS_CONNS' : FCGI_MPXS_CONNS}
+            params = {'FCGI_MAX_CONNS': FCGI_MAX_CONNS,
+                      'FCGI_MAX_REQS': FCGI_MAX_REQS,
+                      'FCGI_MPXS_CONNS': FCGI_MPXS_CONNS,
+                     }
 
             for name in rec.values.keys():
                 if params.has_key(name):
@@ -416,7 +425,7 @@
             self._handle_begin_request(rec)
             return
         elif rec.req_id != self.req_id:
-            #print >> sys.stderr, "Recieved unknown request ID", rec.req_id
+            log("Received unknown request ID %r" % rec.req_id)
             # Ignore requests that aren't active
             return
         if rec.rec_type == FCGI_ABORT_REQUEST:
@@ -437,7 +446,7 @@
             self._handle_data(rec)
         else:
             # Should never happen. 
-            #print >> sys.stderr, "Recieved unknown FCGI record type", rec.rec_type
+            log("Received unknown FCGI record type %r" % rec.rec_type)
             pass
 
         if self.env_complete and self.stdin_complete:
@@ -461,14 +470,14 @@
 
         self.req_id = rec.req_id
         self.keep_conn = rec.keep_conn
-        
+
     def _handle_params(self, rec):
         """Handle environment."""
         if self.env_complete:
             # Should not happen
-            #print >> sys.stderr, "Recieved FCGI_PARAMS more than once"
+            log("Received FCGI_PARAMS more than once")
             return
-        
+
         if not rec.content:
             self.env_complete = 1
 
@@ -479,9 +488,9 @@
         """Handle stdin."""
         if self.stdin_complete:
             # Should not happen
-            #print >> sys.stderr, "Recieved FCGI_STDIN more than once"
+            log("Received FCGI_STDIN more than once")
             return
-        
+
         if not rec.content:
             self.stdin_complete = 1
             self.stdin.reset()
@@ -493,12 +502,12 @@
         """Handle data."""
         if self.data_complete:
             # Should not happen
-            #print >> sys.stderr, "Recieved FCGI_DATA more than once"
+            log("Received FCGI_DATA more than once")
             return
 
         if not rec.content:
             self.data_complete = 1
-        
+
         self.data.write(rec.content)
 
     def getNextChunk(self, data):
@@ -507,31 +516,30 @@
         data = data[8192:]
         return chunk, data
 
-class FCGIbase:
-    """Base class for FCGI requests."""
-    
-    def __init__(self, req_handler, fd, port):
+class FCGI:
+    """FCGI requests"""
+
+    def __init__(self, req_handler, fd=sys.stdin, port=None, max_requests=-1, backlog=5, max_threads=5):
         """Initialize main loop and set request_handler."""
         self.req_handler = req_handler
         self.fd = fd
         self.__port = port
         self._make_socket()
+        # how many requests we have left before terminating this process, -1 means infinite lifetime:
+        self.requests_left = max_requests
+        # for socket.listen(backlog):
+        self.backlog = backlog
+        # how many threads we have at maximum (the main program counts as the first thread)
+        self.max_threads = max_threads
 
-    def run(self):
-        raise NotImplementedError
-
-    def accept_handler(self, conn, addr):
+    def accept_handler(self, conn, addr, inthread=False):
         """Construct Request and run() it."""
         self._check_good_addrs(addr)
         try:
-            req = Request(conn, self.req_handler, self.multi)
+            req = Request(conn, self.req_handler, inthread)
             req.run()
         except SocketErrorOnWrite:
-            if self.multi:
-                import thread
-                thread.exit()
-            #else:
-            #    raise SystemExit
+            raise SystemExit
 
     def _make_socket(self):
         """Create socket and verify FCGI environment."""
@@ -550,54 +558,45 @@
                     raise ValueError("FastCGI port is not setup correctly")
         except socket.error, (err, errmsg):
             if err != errno.ENOTCONN:
-                raise RuntimeError("No FastCGI environment: %s - %s" % (`err`, errmsg))
+                raise RuntimeError("No FastCGI environment: %s - %s" % (repr(err), errmsg))
 
         self.sock = s
-        
+
     def _check_good_addrs(self, addr):
         """Check if request is done from the right server."""
         # Apaches mod_fastcgi seems not to use FCGI_WEB_SERVER_ADDRS. 
         if os.environ.has_key('FCGI_WEB_SERVER_ADDRS'):
-            good_addrs = string.split(os.environ['FCGI_WEB_SERVER_ADDRS'], ',')
-            good_addrs = map(string.strip, good_addrs) # Remove whitespace
+            good_addrs = os.environ['FCGI_WEB_SERVER_ADDRS'].split(',')
+            good_addrs = [addr.strip() for addr in good_addrs] # Remove whitespace
         else:
             good_addrs = None
-        
-        # Check if the connection is from a legal address
-        if good_addrs != None and addr not in good_addrs:
-            raise RuntimeError("Connection from invalid server!")
 
-class THFCGI(FCGIbase):
-    """Multi-threaded main loop to handle FastCGI Requests."""
-    
-    def __init__(self, req_handler, fd=sys.stdin, port=None):
-        """Initialize main loop and set request_handler."""
-        self.multi = 1
-        FCGIbase.__init__(self, req_handler, fd, port)
+        # Check if the connection is from a legal address
+        if good_addrs is not None and addr not in good_addrs:
+            raise RuntimeError("Connection from invalid server!")
 
     def run(self):
-        """Wait & serve. Calls request_handler in new
-        thread on every request."""
-        import thread
-        self.sock.listen(50)
-        
-        while 1:
-            (conn, addr) = self.sock.accept()
-            thread.start_new_thread(self.accept_handler, (conn, addr))
-
-class unTHFCGI(FCGIbase):
-    """Single-threaded main loop to handle FastCGI Requests."""
+        """Wait & serve. Calls request_handler on every request."""
+        self.sock.listen(self.backlog)
+        log("Starting Process")
+        running = True
+        while running:
+            if not self.requests_left:
+                # self.sock.shutdown(RDWR) here does NOT help with backlog
+                running = False
+            elif self.requests_left > 0:
+                self.requests_left -= 1
+            if running:
+                conn, addr = self.sock.accept()
+                threadcount = _threading.activeCount()
+                if threadcount < self.max_threads:
+                    log("Accepted connection, starting thread...")
+                    t = _threading.Thread(target=self.accept_handler, args=(conn, addr, True))
+                    t.start()
+                else:
+                    log("Accepted connection, running in main-thread...")
+                    self.accept_handler(conn, addr, False)
+                log("Active Threads: %d" % _threading.activeCount())
+        self.sock.close()
+        log("Ending Process")
 
-    def __init__(self, req_handler, fd=sys.stdin, port=None):
-        """Initialize main loop and set request_handler."""
-        self.multi = 0
-        FCGIbase.__init__(self, req_handler, fd, port)
-    
-    def run(self):
-        """Wait & serve. Calls request handler for every request (blocking)."""
-        self.sock.listen(50)
-        
-        while 1:
-            (conn, addr) = self.sock.accept()
-            self.accept_handler(conn, addr)
-   
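
To illustrate the unified FCGI class above (replacing THFCGI/unTHFCGI), a
hypothetical FastCGI entry point could be wired up like this; the handler name
and body are placeholders, and the script would have to run under a
FastCGI-aware web server:

    from MoinMoin.support import thfcgi

    def handle_request(req):
        # build and send the response via the Request object passed in
        pass

    # terminate after 1000 requests, use at most 5 threads, listen() backlog of 5
    fcgi = thfcgi.FCGI(handle_request, max_requests=1000, backlog=5, max_threads=5)
    fcgi.run()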
--- a/MoinMoin/support/xapwrap/document.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/support/xapwrap/document.py	Tue Aug 15 11:29:45 2006 +0200
@@ -1,7 +1,6 @@
 """
     xapwrap.document - Pythonic wrapper around Xapian's Document API
 """
-import string
 import datetime
 import re
 import cPickle
--- a/MoinMoin/userform.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/userform.py	Tue Aug 15 11:29:45 2006 +0200
@@ -6,7 +6,7 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-import string, time, re
+import time, re
 from MoinMoin import user, util, wikiutil
 from MoinMoin.util import web, timefuncs
 from MoinMoin.widget import html
@@ -359,8 +359,8 @@
                 '%s [%s%s:%s]' % (
                     time.strftime(self.cfg.datetime_fmt, timefuncs.tmtuple(t)),
                     "+-"[offset < 0],
-                    string.zfill("%d" % (abs(offset) / 3600), 2),
-                    string.zfill("%d" % (abs(offset) % 3600 / 60), 2),
+                    "%02d" % (abs(offset) / 3600),
+                    "%02d" % (abs(offset) % 3600 / 60),
                 ),
             ))
 
--- a/MoinMoin/util/__init__.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/util/__init__.py	Tue Aug 15 11:29:45 2006 +0200
@@ -14,9 +14,9 @@
 #############################################################################
 
 g_xmlIllegalCharPattern = re.compile('[\x01-\x08\x0B-\x0D\x0E-\x1F\x80-\xFF]')
-g_undoUtf8Pattern       = re.compile('\xC2([^\xC2])')
-g_cdataCharPattern      = re.compile('[&<\'\"]')
-g_textCharPattern       = re.compile('[&<]')
+g_undoUtf8Pattern = re.compile('\xC2([^\xC2])')
+g_cdataCharPattern = re.compile('[&<\'\"]')
+g_textCharPattern = re.compile('[&<]')
 g_charToEntity = {
     '&': '&amp;',
     '<': '&lt;',
@@ -60,11 +60,11 @@
     for i in range(len(numbers)-1):
         if pattern[-1] == ',':
             pattern = pattern + str(numbers[i])
-            if numbers[i]+1 == numbers[i+1]:
+            if numbers[i] + 1 == numbers[i+1]:
                 pattern = pattern + '-'
             else:
                 pattern = pattern + ','
-        elif numbers[i]+1 != numbers[i+1]:
+        elif numbers[i] + 1 != numbers[i+1]:
             pattern = pattern + str(numbers[i]) + ','
 
     if pattern[-1] in ',-':
@@ -116,3 +116,4 @@
 def random_string(length):
     chars = ''.join([chr(random.randint(0, 255)) for x in xrange(length)])
     return chars
+
--- a/MoinMoin/util/bdiff.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/util/bdiff.py	Tue Aug 15 11:29:45 2006 +0200
@@ -79,8 +79,8 @@
     a = ("foo\n" * 30)
     b = ("  fao" * 30)
 
-    a = file(r"C:\Dokumente und Einstellungen\Administrator\Eigene Dateien\Progra\Python\MoinMoin\moin-1.6-sync\MoinMoin\util\test.1").read()
-    b = file(r"C:\Dokumente und Einstellungen\Administrator\Eigene Dateien\Progra\Python\MoinMoin\moin-1.6-sync\MoinMoin\util\test.2").read()
+    a = file(r"test.1").read()
+    b = file(r"test.2").read()
     a = a.splitlines(1)
     b = b.splitlines(1)
 
--- a/MoinMoin/util/diff3.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/util/diff3.py	Tue Aug 15 11:29:45 2006 +0200
@@ -6,25 +6,28 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-def text_merge(old, other, new, allow_conflicts=1,
-               marker1='<<<<<<<<<<<<<<<<<<<<<<<<<\n',
-               marker2='=========================\n',
-               marker3='>>>>>>>>>>>>>>>>>>>>>>>>>\n'):
+default_markers = ('<<<<<<<<<<<<<<<<<<<<<<<<<\n',
+                   '=========================\n',
+                   '>>>>>>>>>>>>>>>>>>>>>>>>>\n')
+
+def text_merge(old, other, new, allow_conflicts=1, *markers):
     """ do line by line diff3 merge with three strings """
     result = merge(old.splitlines(1), other.splitlines(1), new.splitlines(1),
-                   allow_conflicts, marker1, marker2, marker3)
+                   allow_conflicts, *markers)
     return ''.join(result)
 
-def merge(old, other, new, allow_conflicts=1,
-          marker1='<<<<<<<<<<<<<<<<<<<<<<<<<\n',
-          marker2='=========================\n',
-          marker3='>>>>>>>>>>>>>>>>>>>>>>>>>\n'):
+def merge(old, other, new, allow_conflicts=1, *markers):
     """ do line by line diff3 merge
         input must be lists containing single lines   
     """
+    if not markers:
+        markers = default_markers
+    marker1, marker2, marker3 = markers
+
     old_nr, other_nr, new_nr = 0, 0, 0
     old_len, other_len, new_len = len(old), len(other), len(new)
     result = []
+
     while old_nr < old_len and other_nr < other_len and new_nr < new_len:
         # unchanged
         if old[old_nr] == other[other_nr] == new[new_nr]:
@@ -33,6 +36,12 @@
             other_nr += 1
             new_nr += 1
         else:
+            if allow_conflicts == 2: # experimental addition to the algorithm
+                if other[other_nr] == new[new_nr]:
+                    result.append(new[new_nr])
+                    other_nr += 1
+                    new_nr += 1
+                    continue
             new_match = find_match(old, new, old_nr, new_nr)
             other_match = find_match(old, other, old_nr, other_nr)
             # new is changed
@@ -100,13 +109,16 @@
         pass
     # conflict
     else:
-        if not allow_conflicts:
-            return None
-        result.append(marker1)
-        result.extend(other[other_nr:])
-        result.append(marker2)
-        result.extend(new[new_nr:])
-        result.append(marker3)
+        if new == other:
+            result.extend(new[new_nr:])
+        else:
+            if not allow_conflicts:
+                return None
+            result.append(marker1)
+            result.extend(other[other_nr:])
+            result.append(marker2)
+            result.extend(new[new_nr:])
+            result.append(marker3)
     return result
 
 def tripple_match(old, other, new, other_match, new_match):
@@ -255,4 +267,3 @@
 
 if __name__ == '__main__':
     main()
-
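
A rough sketch of the reworked call style: markers are now passed positionally
and fall back to default_markers when omitted, and allow_conflicts=2 selects
the experimental mode added above, which takes over identical changes without
emitting conflict markers (the inputs below are made up):

    from MoinMoin.util import diff3

    old   = "aaa\nbbb\nccc\n"
    other = "aaa\nBBB\nccc\n"
    new   = "aaa\nBBB\nccc\n"

    merged = diff3.text_merge(old, other, new, 2)
    # both sides made the same change, so no conflict markers are produced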
--- a/MoinMoin/util/web.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/util/web.py	Tue Aug 15 11:29:45 2006 +0200
@@ -6,7 +6,7 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-import re, types
+import re
 from MoinMoin import config
 
 def getIntegerInput(request, fieldname, default=None, minval=None, maxval=None):
@@ -224,11 +224,11 @@
             another Color instance, a tuple containing 3 color values, 
             a Netscape color name or a HTML color ("#RRGGBB").
         """
-        if isinstance(color, types.TupleType) and len(color) == 3:
+        if isinstance(color, tuple) and len(color) == 3:
             self.r, self.g, self.b = map(int, color)
         elif isinstance(color, Color):
             self.r, self.g, self.b = color.r, color.g, color.b
-        elif not isinstance(color, types.StringType):
+        elif not isinstance(color, str):
             raise TypeError("Color() expects a Color instance, a RGB triple or a color string")
         elif color[0] == '#':
             color = long(color[1:], 16)
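
For reference, the Color constructor touched here accepts any of the input
forms named in its docstring; an informal example:

    from MoinMoin.util.web import Color

    c1 = Color((255, 128, 0))   # RGB triple
    c2 = Color(c1)              # another Color instance
    c3 = Color("#FF8000")       # HTML color string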
--- a/MoinMoin/wikisync.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/wikisync.py	Tue Aug 15 11:29:45 2006 +0200
@@ -7,32 +7,321 @@
 """
 
 import os
+import xmlrpclib
 
 try:
     import cPickle as pickle
 except ImportError:
     import pickle
 
+
+from MoinMoin import wikiutil
 from MoinMoin.util import lock
-from MoinMoin.packages import unpackLine
+from MoinMoin.Page import Page
+from MoinMoin.packages import unpackLine, packLine
+
+
+# sync directions
+UP, DOWN, BOTH = range(3)
+
+
+def normalise_pagename(page_name, prefix):
+    """ Checks if the page_name starts with the prefix.
+        Returns None if it does not, otherwise strips the prefix.
+    """
+    if prefix:
+        if not page_name.startswith(prefix):
+            return None
+        else:
+            return page_name[len(prefix):]
+    else:
+        return page_name
+
+
+class UnsupportedWikiException(Exception): pass
+
+
+class SyncPage(object):
+    """ This class represents a page in one or two wiki(s). """
+    def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None):
+        """ Creates a SyncPage instance.
+            @param name: The canonical name of the page, without prefixes.
+            @param local_rev: The revision of the page in the local wiki.
+            @param remote_rev: The revision of the page in the remote wiki.
+            @param local_name: The page name of the page in the local wiki.
+            @param remote_name: The page name of the page in the remote wiki.
+        """
+        self.name = name
+        self.local_rev = local_rev
+        self.remote_rev = remote_rev
+        self.local_name = local_name
+        self.remote_name = remote_name
+        assert local_rev or remote_rev
+        assert local_name or remote_name
+
+    def __repr__(self):
+        return repr("<Remote Page %r>" % unicode(self))
+
+    def __unicode__(self):
+        return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev)
+
+    def __lt__(self, other):
+        return self.name < other.name
+
+    def __hash__(self):
+        """ Ensures that the hash value of this page only depends on the canonical name. """
+        return hash(self.name)
+
+    def __eq__(self, other):
+        if not isinstance(other, SyncPage):
+            return False
+        return self.name == other.name
+
+    def add_missing_pagename(self, local, remote):
+        """ Checks if the particular concrete page names are unknown and fills
+            them in.
+        """
+        if self.local_name is None:
+            n_name = normalise_pagename(self.remote_name, remote.prefix)
+            assert n_name is not None
+            self.local_name = (local.prefix or "") + n_name
+        elif self.remote_name is None:
+            n_name = normalise_pagename(self.local_name, local.prefix)
+            assert n_name is not None
+            self.remote_name = (remote.prefix or "") + n_name
+
+        return self # makes using list comps easier
+
+    def filter(cls, sp_list, func):
+        """ Returns all pages in sp_list that let func return True
+            for the canonical page name.
+        """
+        return [x for x in sp_list if func(x.name)]
+    filter = classmethod(filter)
+
+    def merge(cls, local_list, remote_list):
+        """ Merges two lists of SyncPages into one, migrating attributes like the names. """
+        # map page names to SyncPage objects :-)
+        d = dict(zip(local_list, local_list))
+        for sp in remote_list:
+            if sp in d:
+                d[sp].remote_rev = sp.remote_rev
+                d[sp].remote_name = sp.remote_name
+            else:
+                d[sp] = sp
+        return d.keys()
+    merge = classmethod(merge)
+
+    def is_only_local(self):
+        """ Is true if the page is only in the local wiki. """
+        return not self.remote_rev
+
+    def is_only_remote(self):
+        """ Is true if the page is only in the remote wiki. """
+        return not self.local_rev
+
+    def is_local_and_remote(self):
+        """ Is true if the page is in both wikis. """
+        return self.local_rev and self.remote_rev
+
+    def iter_local_only(cls, sp_list):
+        """ Iterates over all pages that are local only. """
+        for x in sp_list:
+            if x.is_only_local():
+                yield x
+    iter_local_only = classmethod(iter_local_only)
+
+    def iter_remote_only(cls, sp_list):
+        """ Iterates over all pages that are remote only. """
+        for x in sp_list:
+            if x.is_only_remote():
+                yield x
+    iter_remote_only = classmethod(iter_remote_only)
+
+    def iter_local_and_remote(cls, sp_list):
+        """ Iterates over all pages that are local and remote. """
+        for x in sp_list:
+            if x.is_local_and_remote():
+                yield x
+    iter_local_and_remote = classmethod(iter_local_and_remote)
+
+class RemoteWiki(object):
+    """ This class should be the base for all implementations of remote wiki
+        classes. """
+
+    def __repr__(self):
+        """ Returns a representation of the instance for debugging purposes. """
+        return NotImplemented
+
+    def get_interwiki_name(self):
+        """ Returns the interwiki name of the other wiki. """
+        return NotImplemented
+
+    def get_iwid(self):
+        """ Returns the InterWiki ID. """
+        return NotImplemented
+
+    def get_pages(self, **kwargs):
+        """ Returns a list of SyncPage instances. """
+        return NotImplemented
+
+
+class MoinRemoteWiki(RemoteWiki):
+    """ Used for MoinMoin wikis reachable via XMLRPC. """
+    def __init__(self, request, interwikiname, prefix, pagelist):
+        self.request = request
+        self.prefix = prefix
+        self.pagelist = pagelist
+        _ = self.request.getText
+
+        wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, ))
+        self.wiki_url = wikiutil.mapURL(self.request, wikiurl)
+        self.valid = not wikitag_bad
+        self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2"
+        if not self.valid:
+            self.connection = None
+            return
+
+        self.connection = self.createConnection()
+
+        try:
+            iw_list = self.connection.interwikiName()
+        except xmlrpclib.Fault, e:
+            raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, version 1.6 or newer is required."))
+
+        self.remote_interwikiname = remote_interwikiname = iw_list[0]
+        self.remote_iwid = remote_iwid = iw_list[1]
+        self.is_anonymous = remote_interwikiname is None
+        if not self.is_anonymous and interwikiname != remote_interwikiname:
+            raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)"
+                                             " internally than you specified (%(localname)s).") % {
+                "remotename": wikiutil.escape(remote_interwikiname), "localname": wikiutil.escape(interwikiname)})
+
+        if self.is_anonymous:
+            self.iwid_full = packLine([remote_iwid])
+        else:
+            self.iwid_full = packLine([remote_iwid, interwikiname])
+
+    def createConnection(self):
+        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
+
+    # Public methods
+    def get_diff(self, pagename, from_rev, to_rev):
+        """ Returns the binary diff of the remote page named pagename, given
+            from_rev and to_rev. """
+        result = self.connection.getDiff(pagename, from_rev, to_rev)
+        result["diff"] = str(result["diff"]) # unmarshal Binary object
+        return result
+
+    def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name):
+        """ Merges the diff into the page on the remote side. """
+        result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
+        return result
+
+    # Methods implementing the RemoteWiki interface
+    def get_interwiki_name(self):
+        return self.remote_interwikiname
+
+    def get_iwid(self):
+        return self.remote_iwid
+
+    def get_pages(self, **kwargs):
+        options = {"include_revno": True,
+                   "include_deleted": True,
+                   "exclude_non_writable": kwargs["exclude_non_writable"],
+                   "include_underlay": False,
+                   "prefix": self.prefix,
+                   "pagelist": self.pagelist}
+        pages = self.connection.getAllPagesEx(options)
+        rpages = []
+        for name, revno in pages:
+            normalised_name = normalise_pagename(name, self.prefix)
+            if normalised_name is None:
+                continue
+            rpages.append(SyncPage(normalised_name, remote_rev=revno, remote_name=name))
+        return rpages
+
+    def __repr__(self):
+        return "<MoinRemoteWiki wiki_url=%r valid=%r>" % (getattr(self, "wiki_url", Ellipsis), getattr(self, "valid", Ellipsis))
+
+
+class MoinLocalWiki(RemoteWiki):
+    """ Used for the current MoinMoin wiki. """
+    def __init__(self, request, prefix, pagelist):
+        self.request = request
+        self.prefix = prefix
+        self.pagelist = pagelist
+
+    def getGroupItems(self, group_list):
+        """ Returns all page names that are listed on the page group_list. """
+        pages = []
+        for group_pagename in group_list:
+            pages.extend(Group(self.request, group_pagename).members())
+        return [self.createSyncPage(x) for x in pages]
+
+    def createSyncPage(self, page_name):
+        normalised_name = normalise_pagename(page_name, self.prefix)
+        if normalised_name is None:
+            return None
+        return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name)
+
+    # Public methods:
+
+    # Methods implementing the RemoteWiki interface
+    def get_interwiki_name(self):
+        return self.request.cfg.interwikiname
+
+    def get_iwid(self):
+        return self.request.cfg.iwid
+
+    def get_pages(self, **kwargs):
+        assert not kwargs
+        if self.prefix or self.pagelist:
+            def page_filter(name, prefix=(self.prefix or ""), pagelist=self.pagelist):
+                n_name = normalise_pagename(name, prefix)
+                if not n_name:
+                    return False
+                if not pagelist:
+                    return True
+                return n_name in pagelist
+        else:
+            page_filter = lambda x: True
+        pages = []
+        for x in self.request.rootpage.getPageList(exists=1, include_underlay=False, filter=page_filter):
+            sp = self.createSyncPage(x)
+            if sp:
+                pages.append(sp)
+        return pages
+
+    def __repr__(self):
+        return "<MoinLocalWiki>"
+
+
+# ------------------ Tags ------------------ 
 
 
 class Tag(object):
     """ This class is used to store information about merging state. """
     
-    def __init__(self, remote_wiki, remote_rev, current_rev):
+    def __init__(self, remote_wiki, remote_rev, current_rev, direction, normalised_name):
         """ Creates a new Tag.
         
         @param remote_wiki: The identifier of the remote wiki.
         @param remote_rev: The revision number on the remote end.
         @param current_rev: The related local revision.
+        @param direction: The direction of the sync, encoded as an integer.
+        @param normalised_name: The normalised page name that is common to both wikis.
         """
+        assert (isinstance(remote_wiki, basestring) and isinstance(remote_rev, int)
+                and isinstance(current_rev, int) and isinstance(direction, int)
+                and isinstance(normalised_name, basestring))
         self.remote_wiki = remote_wiki
         self.remote_rev = remote_rev
         self.current_rev = current_rev
+        self.direction = direction
+        self.normalised_name = normalised_name
 
     def __repr__(self):
-        return u"<Tag remote_wiki=%r remote_rev=%r current_rev=%r>" % (self.remote_wiki, self.remote_rev, self.current_rev)
+        return u"<Tag normalised_pagename=%r remote_wiki=%r remote_rev=%r current_rev=%r>" % (self.normalised_name, self.remote_wiki, self.remote_rev, self.current_rev)
 
     def __cmp__(self, other):
         if not isinstance(other, Tag):
@@ -49,6 +338,10 @@
         them having accept a page argument at least. """
         pass
 
+    def dump(self):
+        """ Returns all tags for a given item as a string. """
+        return repr(self.get_all_tags())
+
     def add(self, **kwargs):
         """ Adds a Tag object to the current TagStore. """
         print "Got tag for page %r: %r" % (self.page, kwargs)
@@ -121,17 +414,18 @@
         self.tags = []
         self.commit()
 
-    def fetch(self, iwid_full=None, iw_name=None):
-        assert iwid_full ^ iw_name
-        if iwid_full:
-            iwid_full = unpackLine(iwid_full)
-            if len(iwid_full) == 1:
-                assert False, "This case is not supported yet" # XXX
-            iw_name = iwid_full[1]
-
-        return [t for t in self.tags if t.remote_wiki == iw_name]
+    def fetch(self, iwid_full, direction=None):
+        iwid_full = unpackLine(iwid_full)
+        matching_tags = []
+        for t in self.tags:
+            t_iwid_full = unpackLine(t.remote_wiki)
+            if ((t_iwid_full[0] == iwid_full[0]) # either match IWID or IW name
+                or (len(t_iwid_full) == 2 and len(iwid_full) == 2 and t_iwid_full[1] == iwid_full[1])
+                ) and (direction is None or t.direction == direction):
+                matching_tags.append(t)
+        return matching_tags
 
 
 # currently we just have one implementation, so we do not need
 # a factory method
-TagStore = PickleTagStore
\ No newline at end of file
+TagStore = PickleTagStore
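
A few informal calls showing how the prefix handling in normalise_pagename
behaves (page and prefix names are made up):

    from MoinMoin.wikisync import normalise_pagename

    normalise_pagename("Sync/FrontPage", "Sync/")  # -> "FrontPage"
    normalise_pagename("OtherPage", "Sync/")       # -> None, prefix does not match
    normalise_pagename("FrontPage", "")            # -> "FrontPage", no prefix configured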
--- a/MoinMoin/wikiutil.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/wikiutil.py	Tue Aug 15 11:29:45 2006 +0200
@@ -11,7 +11,6 @@
 import os
 import re
 import time
-import types
 import urllib
 
 from MoinMoin import util, version, config
@@ -1131,7 +1130,7 @@
                 continue
             if hasattr(Parser, 'extensions'):
                 exts = Parser.extensions
-                if type(exts) == types.ListType:
+                if isinstance(exts, list):
                     for ext in Parser.extensions:
                         etp[ext] = Parser
                 elif str(exts) == '*':
@@ -1531,14 +1530,14 @@
 
 def createTicket(tm=None):
     """Create a ticket using a site-specific secret (the config)"""
-    import sha, time, types
+    import sha
     ticket = tm or "%010x" % time.time()
     digest = sha.new()
     digest.update(ticket)
 
     cfgvars = vars(config)
     for var in cfgvars.values():
-        if type(var) is types.StringType:
+        if isinstance(var, str):
             digest.update(repr(var))
 
     return "%s.%s" % (ticket, digest.hexdigest())
--- a/MoinMoin/wikixml/marshal.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/wikixml/marshal.py	Tue Aug 15 11:29:45 2006 +0200
@@ -6,9 +6,6 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-import types
-
-
 class Marshal:
     """ Serialize Python data structures to XML.
 
@@ -64,16 +61,16 @@
         if data is None:
             content = "<none/>"
 
-        elif isinstance(data, types.StringType):
+        elif isinstance(data, str):
             content = (data.replace("&", "&amp;") # Must be done first!
                            .replace("<", "&lt;")
                            .replace(">", "&gt;"))
 
-        elif isinstance(data, types.DictionaryType):
+        elif isinstance(data, dict):
             for key, value in data.items():
                 add_content(self.__toXML(key, value))
 
-        elif isinstance(data, types.ListType) or isinstance(data, types.TupleType):
+        elif isinstance(data, (list, tuple)):
             for item in data:
                 add_content(self.__toXML(self.ITEM_CONTAINER, item))
 
@@ -89,7 +86,7 @@
                                 .replace(">", "&gt;"))
 
         # Close container element
-        if isinstance(content, types.StringType):
+        if isinstance(content, str):
             # No Whitespace
             if element:
                 content = ['<%s>%s</%s>' % (element, content, element)]
--- a/MoinMoin/xmlrpc/__init__.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Tue Aug 15 11:29:45 2006 +0200
@@ -125,11 +125,14 @@
             # report exception back to server
             response = xmlrpclib.dumps(xmlrpclib.Fault(1, self._dump_exc()))
         else:
-            # wrap response in a singleton tuple
-            response = (response,)
-
-            # serialize it
-            response = xmlrpclib.dumps(response, methodresponse=1)
+            if isinstance(response, xmlrpclib.Fault):
+                response = xmlrpclib.dumps(response)
+            else:
+                # wrap response in a singleton tuple
+                response = (response,)
+    
+                # serialize it
+                response = xmlrpclib.dumps(response, methodresponse=1)
 
         self.request.emit_http_headers([
             "Content-Type: text/xml; charset=utf-8",
@@ -235,19 +238,40 @@
                 include_system:: set it to false if you do not want to see system pages
                 include_revno:: set it to True if you want to have lists with [pagename, revno]
                 include_deleted:: set it to True if you want to include deleted pages
+                exclude_non_writable:: do not include pages that the current user may not write to
+                include_underlay:: return underlay pagenames as well
+                prefix:: the page name must begin with this prefix to be included
+                pagelist:: if given, restrict the result to pages whose normalised name is in this list
         @rtype: list
         @return: a list of all pages.
         """
-        options = {"include_system": True, "include_revno": False, "include_deleted": False}
+        from MoinMoin.wikisync import normalise_pagename
+        options = {"include_system": True, "include_revno": False, "include_deleted": False,
+                   "exclude_non_writable": False, "include_underlay": True, "prefix": "",
+                   "pagelist": None}
         if opts is not None:
             options.update(opts)
 
         if not options["include_system"]:
-            filter = lambda name: not wikiutil.isSystemPage(self.request, name)
+            p_filter = lambda name: not wikiutil.isSystemPage(self.request, name)
         else:
-            filter = lambda name: True
+            p_filter = lambda name: True
 
-        pagelist = self.request.rootpage.getPageList(filter=filter, exists=not options["include_deleted"])
+        if options["exclude_non_writable"]:
+            p_filter = lambda name, p_filter=p_filter: p_filter(name) and self.request.user.may.write(name)
+
+        if options["prefix"] or options["pagelist"]:
+            def p_filter(name, p_filter=p_filter, prefix=(options["prefix"] or ""), pagelist=options["pagelist"]):
+                if not p_filter(name):
+                    return False
+                n_name = normalise_pagename(name, prefix)
+                if not n_name:
+                    return False
+                if not pagelist:
+                    return True
+                return n_name in pagelist
+
+        pagelist = self.request.rootpage.getPageList(filter=p_filter, exists=not options["include_deleted"],
+                                                     include_underlay=options["include_underlay"])
         
         if options['include_revno']:
             return [[self._outstr(x), Page(self.request, x).get_real_rev()] for x in pagelist]
@@ -593,7 +617,8 @@
             oldcontents = lambda: oldpage.get_raw_body_str()
 
         if to_rev is None:
-            newcontents = lambda: currentpage.get_raw_body()
+            newpage = currentpage
+            newcontents = lambda: currentpage.get_raw_body_str()
         else:
             newpage = Page(self.request, pagename, rev=to_rev)
             newcontents = lambda: newpage.get_raw_body_str()
@@ -617,7 +642,7 @@
         else:
             return [self._outstr(name), iwid]
 
-    def xmlrpc_mergeDiff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name):
+    def xmlrpc_mergeDiff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, normalised_name):
         """ Merges a diff sent by the remote machine and returns the number of the new revision.
             Additionally, this method tags the new revision.
             
@@ -627,9 +652,10 @@
             @param delta_remote_rev: The revno that the diff is taken against.
             @param last_remote_rev: The last revno of the page `pagename` that is known by the other wiki site.
             @param interwiki_name: Used to build the interwiki tag.
+            @param normalised_name: The normalised pagename that is common to both wikis.
         """
         from MoinMoin.util.bdiff import decompress, patch
-        from MoinMoin.wikisync import TagStore
+        from MoinMoin.wikisync import TagStore, BOTH
         LASTREV_INVALID = xmlrpclib.Fault("LASTREV_INVALID", "The page was changed")
 
         pagename = self._instr(pagename)
@@ -656,17 +682,20 @@
 
         # generate the new page revision by applying the diff
         newcontents = patch(basepage.get_raw_body_str(), decompress(str(diff)))
+        #print "Diff against %r" % basepage.get_raw_body_str()
 
         # write page
         try:
-            currentpage.saveText(newcontents.encode("utf-8"), last_remote_rev, comment=comment)
+            currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev, comment=comment)
+        except PageEditor.Unchanged: # could happen if the pages of both wikis are equal
+            pass
         except PageEditor.EditConflict:
             return LASTREV_INVALID
 
         current_rev = currentpage.get_real_rev()
         
         tags = TagStore(currentpage)
-        tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev)
+        tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev, direction=BOTH, normalised_name=normalised_name)
 
         # XXX unlock page
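
The extended getAllPagesEx options can be exercised over XML-RPC roughly as
follows; the wiki URL and prefix are placeholders, and the option names are the
ones documented above:

    import xmlrpclib

    srcwiki = xmlrpclib.ServerProxy("http://example.org/?action=xmlrpc2", allow_none=True)
    pages = srcwiki.getAllPagesEx({"include_revno": True,
                                   "include_deleted": True,
                                   "include_underlay": False,
                                   "exclude_non_writable": False,
                                   "prefix": "Sync/"})
    # with include_revno=True, each entry is a [pagename, revno] pair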
 
--- a/README	Tue Aug 15 00:06:07 2006 +0200
+++ b/README	Tue Aug 15 11:29:45 2006 +0200
@@ -40,7 +40,7 @@
 
 See docs/CHANGES                 for a version history. READ THIS!
 See docs/INSTALL.html            for installation instructions.
-See docs/UPDATE.html             for updating instructions.
+See docs/README.migration        for data conversion instructions.
 
 Note that the code base contains some experimental or unfinished features.
 Use them at your own risk. Official features are described on the set of
--- a/contrib/auth_externalcookie/wikiconfig.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/contrib/auth_externalcookie/wikiconfig.py	Tue Aug 15 11:29:45 2006 +0200
@@ -4,7 +4,7 @@
 # See the XXX places for customizing it to your needs. You need to put this
 # code into your farmconfig.py or wikiconfig.py.
 
-# ...
+# HINT: this code is slightly outdated; if you fix it to work with 1.6, please send us a copy.
 
 class FarmConfig(DefaultConfig):
     def external_cookie(request, **kw):
@@ -13,7 +13,7 @@
         user = None
         try_next = True # if True, moin tries the next auth method
         cookiename = "whatever" # XXX external cookie name you want to use
-        
+
         try:
             cookie = Cookie.SimpleCookie(request.saved_cookie)
         except Cookie.CookieError:
@@ -27,7 +27,7 @@
             cookievalue = urllib.unquote(cookievalue) # cookie value is urlencoded, decode it
             cookievalue = cookievalue.decode('iso-8859-1') # decode cookie charset to unicode
             cookievalue = cookievalue.split('#') # cookie has format loginname#firstname#lastname#email
-            
+
             auth_username = cookievalue[0] # having this cookie means user auth has already been done!
             aliasname = email = ''
             try:
@@ -44,13 +44,13 @@
             from MoinMoin.user import User
             # giving auth_username to User constructor means that authentication has already been done.
             user = User(request, name=auth_username, auth_username=auth_username)
-            
+
             changed = False
             if aliasname != user.aliasname: # was the aliasname externally updated?
                 user.aliasname = aliasname ; changed = True # yes -> update user profile
             if email != user.email: # was the email addr externally updated?
                 user.email = email ; changed = True # yes -> update user profile
-            
+
             if user:
                 user.create_or_update(changed)
             if user and user.valid: # did we succeed making up a valid user?
--- a/docs/CHANGES	Tue Aug 15 00:06:07 2006 +0200
+++ b/docs/CHANGES	Tue Aug 15 11:29:45 2006 +0200
@@ -219,6 +219,16 @@
       You need to change this in your wikiconfig.py or farmconfig.py file.
       See MoinMoin/multiconfig.py for an alternative way if you can't do that.
 
+Version 1.5-current:
+   * moin.fcg improved - if you use FastCGI, you must use the new file:
+     * can self-terminate after some number of requests (default: -1, this means
+       "unlimited lifetime")
+     * the number of created threads is now limited (default: 5); use 1 for
+       non-threaded operation.
+     * configurable socket.listen() backlog (default: 5)
+  
+Version 1.5.4:
+    HINT: read docs/README.migration.
 Version 1.5.4-current:
     * increased maxlength of some input fields from 80 to 200
 
--- a/docs/CHANGES.aschremmer	Tue Aug 15 00:06:07 2006 +0200
+++ b/docs/CHANGES.aschremmer	Tue Aug 15 11:29:45 2006 +0200
@@ -2,19 +2,28 @@
 ===============================
 
   Known main issues:
+    * How to handle renames/deletes?
     * How will we store tags? (Metadata support would be handy)
-    * How to handle renames/deletes?
-    * How to handle colliding/empty interwiki names?
+      (currently done in Pickle files)
 
   ToDo:
-    * Implement actual syncronisation.
-      * Add correct IWID_full handling.
+    * Tags should store the page name to recognise renaming scenarios.
+    * Implement all synchronisation cases (all 3 page sets).
+    * Test with prefixes
+    * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
+    * Attach the status information to the job page.
+    * Show tags in an action=info view?
     * Implement a cross-site authentication system, i.e. mainly an
-      identity storage.
+      identity storage. (does OpenID make sense?)
     * Clean up trailing whitespace.
     * Add page locking, i.e. use the one in the new storage layer.
     * Check what needs to be documented on MoinMaster.
     * Search for XXX
+    * Maybe refactor YYY into MoinLocalWiki
+    * Reduce the amount of "very" in the code
+    * Do older tags of one wiki site have to be stored as well? Why don't we
+      keep just one tag?
+    * Put author names into the comment field, transmit mimetypes.
 
   New Features:
     * XMLRPC method to return the Moin version
@@ -30,6 +39,9 @@
       no system pages etc.)
     * IWID support - i.e. every instance has a unique ID
     * InterWiki page editable in the wiki, modification detection based on mtimes
+    * SyncPages action
+    * XMLRPC functions may return Fault instances
+    * diff3 algorithm extended, a new mode should reduce the conflicts
 
   Bugfixes (only stuff that is buggy in moin/1.6 main branch):
     * Conflict resolution fixes. (merged into main)
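The "XMLRPC functions may return Fault instances" item above refers to the Fault class from the Python standard library, so an error can be handed back to the caller without raising. A minimal, purely illustrative sketch (example_xmlrpc_method is made up; this is not the actual MoinMoin xmlrpc code):

    import xmlrpclib

    def example_xmlrpc_method(pagename, text):
        # on error, return a Fault object instead of raising; the enhanced
        # Moin XMLRPC server mentioned above turns it into a fault response
        if not text:
            return xmlrpclib.Fault(1, "refusing to store empty text for %r" % pagename)
        # ... otherwise store the page and report success ...
        return True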
@@ -86,7 +98,15 @@
          direction option, replaced "localMatch"/"remoteMatch" by "pageMatch".
          Store mtime for InterWiki list updates and detect changes based on it.
          Added support for localPrefix and remotePrefix.
-Week 32: Continued work on the merge logic, finished prefix handling.
+Week 32: Continued work on the merge logic, finished prefix handling. Added local conflict
+         detection and logging support to SyncPages. Refactored conflict flag detection from
+         the edit action into the PageEditor class. Enhanced the XMLRPC server in Moin to allow
+         XMLRPC functions to return Fault instances. Introduced a new diff3 mode that should
+         reduce the number of conflicts. Fixed hard-to-track-down bugs in SyncPages. Store full
+         IWIDs and match by either of the two components when searching for tags. Ignore
+         underlay pages. Filter the remote page list by the prefix and the pageList on the
+         remote side. Finished the direction==DOWN mode. Started designing the solutions for
+         the other sync cases. Store and transmit the normalised page name.
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
@@ -108,6 +128,13 @@
 2006-08-04: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-08-05: student didn't work on project
 2006-08-06: student didn't work on project -- a Sunday
+2006-08-07: entry missing
+2006-08-08: entry missing
+2006-08-09: entry missing
+2006-08-10: entry missing
+2006-08-11: entry missing
+2006-08-12: entry missing
+2006-08-13: entry missing
 
 Time plan
 =========
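The ToDo and Week 32 notes above mention tags that carry the normalised page name, the sync direction and the full IWID, and that are matched by either IWID component. The sketch below is illustration only: the class and field names are assumptions, not the real MoinMoin.wikisync API, and it assumes the two IWID components are the unique instance ID and the interwiki name.

    class SyncTagSketch:
        """Roughly what one stored tag might need to carry (illustrative only)."""
        def __init__(self, remote_wiki, remote_rev, current_rev,
                     direction, normalised_name):
            self.remote_wiki = remote_wiki          # full IWID, assumed here to be a (unique_id, interwiki_name) pair
            self.remote_rev = remote_rev            # revision on the remote side
            self.current_rev = current_rev          # matching local revision
            self.direction = direction              # e.g. DOWN, or a both-ways mode
            self.normalised_name = normalised_name  # page name with sync prefixes stripped

        def matches_wiki(self, unique_id, interwiki_name):
            # "match by either of the two components": a tag is a hit if either
            # the unique instance ID or the interwiki name matches
            stored_id, stored_name = self.remote_wiki
            return stored_id == unique_id or stored_name == interwiki_name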
--- a/docs/CHANGES.config	Tue Aug 15 00:06:07 2006 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,97 +0,0 @@
-# this is a part of multiconfig.py - the stuff that changed recently:
-    actions_excluded = [] # ['DeletePage', 'AttachFile', 'RenamePage']
-    auth = [moin_cookie]
-    cookie_domain = None # use '.domain.tld" for a farm with hosts in that domain
-    cookie_path = None   # use '/wikifarm" for a farm with pathes below that path
-    editor_default = 'text' # which editor is called when nothing is specified
-    editor_ui = 'freechoice' # which editor links are shown on user interface
-    editor_force = False
-    hacks = {} # { 'feature1': value1, ... }
-               # Configuration for features still in development.
-               # For boolean stuff just use config like this:
-               #   hacks = { 'feature': True, ...}
-               # and in the code use:
-               #   if cfg.hacks.get('feature', False): <doit>
-               # A non-existing hack key should ever mean False, None, "", [] or {}!
-    interwiki_preferred = [] # list of wiki names to show at top of interwiki list
-    language_default = 'en'
-    language_ignore_browser = False # ignore browser settings, use language_default
-                                    # or user prefs
-    lupy_search = False # disabled until lupy is finished
-    mail_sendmail = None # "/usr/sbin/sendmail -t -i" to not use SMTP, but sendmail
-    show_interwiki = 0 # show interwiki name (and link it to page_front_page)
-    superuser = [] # list of unicode user names that have super powers :)
-    user_email_unique = True # do we check whether a user's email is unique?
-
-    user_checkbox_fields = [
-        ('mailto_author', lambda _: _('Publish my email (not my wiki homepage) in author info')),
-        ('edit_on_doubleclick', lambda _: _('Open editor on double click')),
-        ('remember_last_visit', lambda _: _('Remember last page visited')),
-        ('show_nonexist_qm', lambda _: _('Show question mark for non-existing pagelinks')),
-        ('show_page_trail', lambda _: _('Show page trail')),
-        ('show_toolbar', lambda _: _('Show icon toolbar')),
-        ('show_topbottom', lambda _: _('Show top/bottom links in headings')),
-        ('show_fancy_diff', lambda _: _('Show fancy diffs')),
-        ('wikiname_add_spaces', lambda _: _('Add spaces to displayed wiki names')),
-        ('remember_me', lambda _: _('Remember login information')),
-        ('want_trivial', lambda _: _('Subscribe to trivial changes')),
-        
-        ('disabled', lambda _: _('Disable this account forever')),
-        # if an account is disabled, it may be used for looking up
-        # id -> username for page info and recent changes, but it
-        # is not usable for the user any more:
-    ]
-    
-    user_checkbox_defaults = {'mailto_author':       0,
-                              'edit_on_doubleclick': 0,
-                              'remember_last_visit': 0,
-                              'show_nonexist_qm':    nonexist_qm,
-                              'show_page_trail':     1,
-                              'show_toolbar':        1,
-                              'show_topbottom':      0,
-                              'show_fancy_diff':     1,
-                              'wikiname_add_spaces': 0,
-                              'remember_me':         1,
-                              'want_trivial':        0,
-                             }
-    # don't let the user change those
-    # user_checkbox_disable = ['disabled', 'want_trivial']
-    user_checkbox_disable = []
-    # remove those checkboxes:
-    user_checkbox_remove = ['edit_on_doubleclick', 'show_nonexist_qm', 'show_toolbar', 'show_topbottom',
-                            'show_fancy_diff', 'wikiname_add_spaces', 'remember_me', 'disabled',]
-    
-    user_form_fields = [
-        ('name', _('Name'), "text", "36", _("(Use Firstname''''''Lastname)")),
-        ('aliasname', _('Alias-Name'), "text", "36", ''),
-        ('password', _('Password'), "password", "36", ''),
-        ('password2', _('Password repeat'), "password", "36", _('(Only when changing passwords)')),
-        ('email', _('Email'), "text", "36", ''),
-        ('css_url', _('User CSS URL'), "text", "40", _('(Leave it empty for disabling user CSS)')),
-        ('edit_rows', _('Editor size'), "text", "3", ''),
-        ##('theme', _('Preferred theme'), [self._theme_select()])
-        ##('', _('Editor Preference'), [self._editor_default_select()])
-        ##('', _('Editor shown on UI'), [self._editor_ui_select()])
-        ##('', _('Time zone'), [self._tz_select()])
-        ##('', _('Date format'), [self._dtfmt_select()])
-        ##('', _('Preferred language'), [self._lang_select()])
-    ]
-    
-    user_form_defaults = { # key: default
-        'name': '',
-        'aliasname': '',
-        'password': '',
-        'password2': '',
-        'email': '',
-        'css_url': '',
-        'edit_rows': "20",
-    }
-    
-    # don't let the user change those, but show them:
-    user_form_disable = ['name', 'aliasname', 'email',]
-    
-    # remove those completely:
-    user_form_remove = ['password', 'password2', 'css_url', 'logout', 'create', 'account_sendmail',]
-    
-    user_homewiki = 'Self' # interwiki name for where user homepages are located
-
--- a/setup.py	Tue Aug 15 00:06:07 2006 +0200
+++ b/setup.py	Tue Aug 15 11:29:45 2006 +0200
@@ -3,11 +3,12 @@
 """
     MoinMoin installer
 
-    @copyright: 2001-2005 by Jürgen Hermann <jh@web.de>
+    @copyright: 2001-2005 by Jürgen Hermann <jh@web.de>,
+                2006 by MoinMoin:ThomasWaldmann
     @license: GNU GPL, see COPYING for details.
 """
 
-import glob, os, string, sys
+import os, sys, glob
 
 import distutils
 from distutils.core import setup
@@ -134,8 +135,6 @@
             raise Exception("You have to inherit build_scripts_create and"
                 " provide a package name")
 
-        to_module = string.maketrans('-/', '_.')
-
         self.mkpath(self.build_dir)
         for script in self.scripts:
             outfile = os.path.join(self.build_dir, os.path.basename(script))
@@ -149,7 +148,7 @@
                 continue
 
             module = os.path.splitext(os.path.basename(script))[0]
-            module = string.translate(module, to_module)
+            module = module.replace('-', '_').replace('/', '.')
             script_vars = {
                 'python': os.path.normpath(sys.executable),
                 'package': self.package_name,
@@ -188,7 +187,7 @@
         module files.
     """
     script = os.path.splitext(os.path.basename(path))[0]
-    script = string.replace(script, '_', '-')
+    script = script.replace('_', '-')
     if sys.platform == "win32":
         script = script + ".bat"
     return script
@@ -241,6 +240,8 @@
         'MoinMoin.script.old',
         'MoinMoin.script.old.migration',
         'MoinMoin.script.old.xmlrpc-tools',
+        'MoinMoin.script.xmlrpc',
+        'MoinMoin.search',
         'MoinMoin.security',
         'MoinMoin.server',
         'MoinMoin.stats',
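The setup.py hunk above replaces the old string-module helpers with plain str methods. A standalone sanity check of the equivalence (Python 2 style, matching this code base; the sample name is made up):

    import string

    name = "moin-account/check"  # made-up example, not a real script name
    old_style = string.translate(name, string.maketrans('-/', '_.'))
    new_style = name.replace('-', '_').replace('/', '.')
    assert old_style == new_style == "moin_account.check"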
--- a/wiki/server/moin.fcg	Tue Aug 15 00:06:07 2006 +0200
+++ b/wiki/server/moin.fcg	Tue Aug 15 11:29:45 2006 +0200
@@ -26,8 +26,15 @@
 ## import os
 ## os.environ['MOIN_DEBUG'] = '1'
 
-# Use threaded version or non-threaded version (default 1)?
-use_threads = 1
+# how many requests shall be handled by a moin fcgi process before it dies,
+# -1 means "unlimited lifetime":
+max_requests = -1
+
+# how many threads to use (1 means use only the main thread, i.e. non-threaded operation)
+max_threads = 5
+
+# backlog, used in the socket.listen(backlog) call
+backlog = 5
 
 
 # Code ------------------------------------------------------------------
@@ -37,7 +44,7 @@
 
 # Set threads flag, so other code can use proper locking
 from MoinMoin import config
-config.use_threads = use_threads
+config.use_threads = max_threads > 1
 del config
 
 from MoinMoin.request import FCGI
@@ -48,10 +55,6 @@
     request.run()
 
 if __name__ == '__main__':
-    if use_threads:
-        fcg = thfcgi.THFCGI(handle_request)
-    else:
-        fcg = thfcgi.unTHFCGI(handle_request)    
-
+    fcg = thfcgi.FCGI(handle_request, max_requests=max_requests, backlog=backlog, max_threads=max_threads)
     fcg.run()
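For completeness, here is how a site might tune the new entry point shown above. The keyword arguments come from the constructor call in this hunk; the values are only an example (a single-threaded process that recycles itself after 1000 requests):

    # example-only values for the knobs defined at the top of moin.fcg
    max_requests = 1000   # recycle the FastCGI process after 1000 requests
    max_threads = 1       # non-threaded operation
    backlog = 5           # passed through to socket.listen()

    fcg = thfcgi.FCGI(handle_request,
                      max_requests=max_requests,
                      backlog=backlog,
                      max_threads=max_threads)
    fcg.run()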