changeset 1340:6786dc227674

merged main
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Sun, 20 Aug 2006 13:51:01 +0200
parents 544b931cd965 (current diff) 5ffa0d1ae026 (diff)
children f4558797325a
files MoinMoin/_tests/test_newlocking.py
diffstat 12 files changed, 200 insertions(+), 184 deletions(-) [+]
line wrap: on
line diff
--- a/MoinMoin/PageEditor.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/PageEditor.py	Sun Aug 20 13:51:01 2006 +0200
@@ -509,6 +509,11 @@
         """
         _ = self._
         success = True
+        if not (self.request.user.may.write(self.page_name)
+                and self.request.user.may.delete(self.page_name)):
+            msg = _('You are not allowed to delete this page!')
+            raise self.AccessDenied, msg
+
         try:
             # First save a final backup copy of the current page
             # (recreating the page allows access to the backups again)
--- a/MoinMoin/_tests/test_newlocking.py	Sun Aug 20 13:50:27 2006 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    MoinMoin - MoinMoin test for new style "locking" (== mostly avoid locking)
-
-    The idea is to not have to lock files when we just want to read them.
-    When we never overwrite file content with new stuff, locking is not needed.
-    We can just write the new content into a new file (with tmpfname) and then
-    rename it to the original filename. Files that opened the original filename
-    before the rename will still read old content after the rename (until they
-    are closed).
-
-    @copyright: 2006 by Thomas Waldmann (idea: Bastian Blank)
-    @license: GNU GPL, see COPYING for details.
-"""
-
-import unittest, tempfile, os, shutil
-from MoinMoin._tests import TestConfig, TestSkipped
-
-def rename(oldname, newname):
-    """ Multiplatform rename
-
-    Move to MoinMoin.util.filesys when done.
-
-    TODO:
-    Test/Fix win32 stuff.
-    
-    Check: MoveFileEx: If the new filename is None, it deletes the file (needs very recent pywin32 binding).
-           This is documented for the "on reboot" stuff, does this also work when not doing it on next reboot?
-           Maybe we can use this at another place.
-           
-    API doc: http://msdn.microsoft.com/library/default.asp?url=/library/en-us/fileio/fs/movefileex.asp
-    
-    Windows 95/98/ME do not implement MoveFileEx().
-    Either have some other working code or document we drop support for less-than-NT.
-    Document pywin32 extension dependency.
-
-    """
-    # this nt specific code should be replaced by better stuff
-    if os.name == 'nt':
-        # uses mark hammond's pywin32 extension
-        # there seems to be also stuff in win32api.MoveFileEx and win32con.MOVEFILE_REPLACE_EXISTING
-        # what's the difference to them in win32file?
-        from win32file import MoveFileEx, MOVEFILE_REPLACE_EXISTING
-        ret = MoveFileEx(oldname, newname, MOVEFILE_REPLACE_EXISTING)
-        # If the function succeeds, the return value is nonzero.
-        # If the function fails, the return value is 0 (zero). To get extended error information, call GetLastError.
-        if ret == 0:
-            raise OSError # emulate os.rename behaviour
-    else:
-        os.rename(oldname, newname) # rename has no return value, but raises OSError in case of failure
-
-
-class NewLockTests(unittest.TestCase):
-
-    def setUp(self):
-        self.test_dir = tempfile.mkdtemp('', 'lock_')
-
-    def tearDown(self):
-        shutil.rmtree(self.test_dir)
-
-    def testNoLockingForReading(self):
-        """ new locking: NoLockingForReading tests if files still work when filename is target of a rename """
-        fname = os.path.join(self.test_dir, 'readtest')
-        tmpfname = os.path.join(self.test_dir, '__readtest')
-        origdata = "precious content"
-        newdata = "new content"
-        f = file(fname, "w") ; f.write(origdata) ; f.close()
-        f = file(fname, "r")
-        ftmp = file(tmpfname, "w") ; ftmp.write(newdata) ; ftmp.close()
-        rename(tmpfname, fname)
-        read1data = f.read() ; f.close() # we should still get origdata here!
-        f = file(fname, "r") ; read2data = f.read() ; f.close() # we should newdata now.
-        self.failUnless(origdata == read1data and newdata == read2data, "got wrong data when reading")
-
--- a/MoinMoin/_tests/test_packages.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/_tests/test_packages.py	Sun Aug 20 13:51:01 2006 +0200
@@ -21,13 +21,11 @@
         self.script = script or u"""moinmoinpackage|1
 print|foo
 ReplaceUnderlay|testdatei|TestSeite2
-DeletePage|TestSeite2|Test ...
 IgnoreExceptions|True
 DeletePage|TestSeiteDoesNotExist|Test ...
 IgnoreExceptions|False
 AddRevision|foofile|FooPage
 AddRevision|foofile|FooPage
-DeletePage|FooPage|Test ...
 setthemename|foo
 #foobar
 installplugin|foo|local|parser|testy
@@ -60,7 +58,6 @@
         testseite2 = Page(self.request, 'TestSeite2')
         self.assertEqual(testseite2.getPageText(), "Hello world, I am the file testdatei")
         self.assert_(testseite2.isUnderlayPage())
-        self.assert_(not Page(self.request, 'FooPage').exists())
 
 class QuotingTestCase(TestCase):
     def testQuoting(self):
--- a/MoinMoin/_tests/test_wikisync.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/_tests/test_wikisync.py	Sun Aug 20 13:51:01 2006 +0200
@@ -29,6 +29,7 @@
         self.assert_(not tags.get_all_tags())
         tags.add(remote_wiki="foo", remote_rev=1, current_rev=2, direction=BOTH, normalised_name="FrontPage")
         tags = TagStore(self.page) # reload
+        dummy = repr(tags.get_all_tags()) # this should not raise
         self.assert_(tags.get_all_tags()[0].remote_rev == 1)
     
     def tearDown(self):
--- a/MoinMoin/action/DeletePage.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/action/DeletePage.py	Sun Aug 20 13:51:01 2006 +0200
@@ -25,6 +25,8 @@
         self.form_trigger_label = _('Delete')
 
     def is_allowed(self):
+        # this is not strictly necessary because the underlying storage code checks
+        # as well
         may = self.request.user.may
         return may.write(self.pagename) and may.delete(self.pagename)
 
--- a/MoinMoin/action/SyncPages.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Sun Aug 20 13:51:01 2006 +0200
@@ -30,6 +30,9 @@
 from MoinMoin.util import diff3
 
 
+debug = True
+
+
 # map sync directions
 directions_map = {"up": UP, "down": DOWN, "both": BOTH}
 
@@ -37,19 +40,32 @@
 class ActionStatus(Exception): pass
 
 
-class ActionClass:
-    INFO, WARN, ERROR = range(3) # used for logging
+class ActionClass(object):
+    INFO, WARN, ERROR = zip(range(3), ("", "<!>", "/!\\")) # used for logging
 
     def __init__(self, pagename, request):
         self.request = request
         self.pagename = pagename
-        self.page = Page(request, pagename)
+        self.page = PageEditor(request, pagename)
         self.status = []
         request.flush()
 
-    def log_status(self, level, message):
+    def log_status(self, level, message="", substitutions=(), raw_suffix=""):
         """ Appends the message with a given importance level to the internal log. """
-        self.status.append((level, message))
+        self.status.append((level, message, substitutions, raw_suffix))
+
+    def generate_log_table(self):
+        """ Transforms self.status into a user readable table. """
+        table_line = u"|| %(smiley)s || %(message)s%(raw_suffix)s ||"
+        table = []
+
+        for line in self.status:
+            macro_args = [line[1]] + list(line[2])
+            table.append(table_line % {"smiley": line[0][1], "message":
+                macro_args and u"[[GetText2(|%s)]]" % (packLine(macro_args), ),
+                "raw_suffix": line[3]})
+
+        return "\n".join(table)
 
     def parse_page(self):
         """ Parses the parameter page and returns the read arguments. """
@@ -102,6 +118,7 @@
 
         params = self.fix_params(self.parse_page())
 
+        # XXX acquire readlock on self.page
         try:
             if params["direction"] == UP:
                 raise ActionStatus(_("The only supported directions are BOTH and DOWN."))
@@ -114,7 +131,7 @@
 
             local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"])
             try:
-                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"])
+                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], verbose=debug)
             except UnsupportedWikiException, (msg, ):
                 raise ActionStatus(msg)
 
@@ -123,11 +140,13 @@
 
             self.sync(params, local, remote)
         except ActionStatus, e:
-            msg = u'<p class="error">%s</p><p>%s</p>\n' % (e.args[0], repr(self.status))
+            msg = u'<p class="error">%s</p>\n' % (e.args[0], )
         else:
-            msg = u"%s<p>%s</p>" % (_("Syncronisation finished."), repr(self.status))
+            msg = u"%s" % (_("Syncronisation finished."), )
 
-        # XXX append self.status to the job page
+        self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0)
+        # XXX release readlock on self.page
+
         return self.page.send_page(self.request, msg=msg)
     
     def sync(self, params, local, remote):
@@ -137,14 +156,6 @@
             Now there are a few other cases left that have to be implemented:
                 Wiki A    | Wiki B   | Remark
                 ----------+----------+------------------------------
-                exists    | deleted  | In this case, we do a normal merge if there
-                          |          | are no tags. If there were changes in
-                          |          | Wiki A, there is a merge with a conflict.
-                          |          | Otherwise (no changes past last merge),
-                          |          | the page is deleted in Wiki A.
-                          |          | This needs static info that could be
-                          |          | transferred with the pagelist.
-                ----------+----------+-------------------------------
                 exists    | non-     | Now the wiki knows that the page was renamed.
                 with tags | existant | There should be an RPC method that asks
                           |          | for the new name (which could be recorded
@@ -161,15 +172,23 @@
                           |          | do a sync without considering tags
                 with tags | with non | to ensure data integrity.
                           | matching | Hmm, how do we detect this
-                          | tags     | case if the unmatching tags are only on the remote side?
+                          | tags     | case if the unmatching tags are only
+                          |          | on the remote side?
                 ----------+----------+-------------------------------
-                exists    | exists   | already handled.
         """
-        _ = self.request.getText
+        _ = lambda x: x # we will translate it later
+
         direction = params["direction"]
+        if direction == BOTH:
+            match_direction = direction
+        else:
+            match_direction = None
+
         local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
         remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()])
 
+        self.log_status(self.INFO, _("Syncronisation started -"), raw_suffix=" [[DateTime(%s)]]" % self.page._get_local_timestamp())
+
         l_pages = local.get_pages()
         r_pages = remote.get_pages(exclude_non_writable=direction != DOWN)
 
@@ -180,39 +199,28 @@
 
         m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)]
 
-        self.log_status(self.INFO, "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))) # XXX remove?
+        self.log_status(self.INFO, _("Got a list of %s local and %s remote pages. This results in %s different pages over-all."),
+                        (str(len(l_pages)), str(len(r_pages)), str(len(m_pages))))
 
         if params["pageMatch"]:
             m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
-        self.log_status(self.INFO, "After filtering: Got %i merges pages" % (len(m_pages), )) # XXX remove
+            self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), ))
 
-        on_both_sides = list(SyncPage.iter_local_and_remote(m_pages))
-        remote_but_not_local = list(SyncPage.iter_remote_only(m_pages))
-        local_but_not_remote = list(SyncPage.iter_local_only(m_pages))
-        
-        # some initial test code (XXX remove)
-        #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local])
-        #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote])
-        #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "<br>These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages))
-
-        # let's do the simple case first, can be refactored later to match all cases
-        # XXX handle deleted pages
-        for rp in on_both_sides:
+        def handle_page(rp):
             # XXX add locking, acquire read-lock on rp
-            #print "Processing %r" % rp
+            if debug:
+                self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % rp)
 
             local_pagename = rp.local_name
             current_page = PageEditor(self.request, local_pagename) # YYY direct access
-            current_rev = current_page.get_real_rev()
+            comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
 
             tags = TagStore(current_page)
-            if direction == BOTH:
-                match_direction = direction
-            else:
-                match_direction = None
-            matching_tags = tags.fetch(iwid_full=remote.iwid_full,direction=match_direction)
+
+            matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction)
             matching_tags.sort()
-            #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags)
+            if debug:
+                self.log_status(ActionClass.INFO, raw_suffix="Tags: %r [[BR]] All: %r" % (matching_tags, tags.tags))
 
             # some default values for non matching tags
             normalised_name = None
@@ -222,25 +230,44 @@
 
             if matching_tags:
                 newest_tag = matching_tags[-1]
-                
+
+                local_change = newest_tag.current_rev != rp.local_rev
+                remote_change = newest_tag.remote_rev != rp.remote_rev
+
                 # handle some cases where we cannot continue for this page
-                if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev):
-                    continue # no changes done, next page
-                if rp.local_mime_type != MIMETYPE_MOIN and not (newest_tag.remote_rev == rp.remote_rev ^ newest_tag.current_rev == current_rev):
-                    self.log_status(ActionClass.WARN, _("The item %(pagename)s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again.") % {"pagename": rp.name})
-                    continue
+                if not remote_change and (direction == DOWN or not local_change):
+                    return # no changes done, next page
+                if rp.local_deleted and rp.remote_deleted:
+                    return
+                if rp.remote_deleted and not local_change:
+                    msg = local.delete_page(rp.local_name, comment)
+                    if not msg:
+                        self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (rp.name, ))
+                    else:
+                        self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (rp.name, ), msg)
+                    return
+                if rp.local_deleted and not remote_change:
+                    if direction == DOWN:
+                        return
+                    self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r remotely" % rp) # XXX add
+                    msg = remote.delete_page(rp.remote_name)
+                    self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (rp.name, ))
+                    return
+                if rp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change):
+                    self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, ))
+                    return
                 if rp.local_mime_type != rp.remote_mime_type:
-                    self.log_status(ActionClass.WARN, _("The item %(pagename)s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again.") % {"pagename": rp.name})
-                    continue
+                    self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (rp.name, ))
+                    return
                 if newest_tag.normalised_name != rp.name:
-                    self.log_status(ActionClass.WARN, _("The item %(pagename)s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page.") % {"pagename": rp.name}) # XXX implement renames
+                    self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames
                 else:
                     normalised_name = newest_tag.normalised_name
                     local_rev = newest_tag.current_rev
                     remote_rev = newest_tag.remote_rev
                     old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access
 
-            self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name})
+            self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, rp.remote_name))
 
             if direction == DOWN:
                 remote_rev = None # always fetch the full page, ignore remote conflict check
@@ -249,14 +276,20 @@
                 patch_base_contents = old_contents
 
             if remote_rev != rp.remote_rev:
-                diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name)
-                if diff_result is None:
-                    self.log_status(ActionClass.ERROR, _("The page %(pagename)s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced.") % {"pagename": rp.remote_name})
-                    continue
-                is_remote_conflict = diff_result["conflict"]
-                assert diff_result["diffversion"] == 1
-                diff = diff_result["diff"]
-                current_remote_rev = diff_result["current"]
+                if rp.remote_deleted: # ignore remote changes
+                    current_remote_rev = rp.remote_rev
+                    is_remote_conflict = False
+                    diff = None
+                    self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (rp.name, ))
+                else:
+                    diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name)
+                    if diff_result is None:
+                        self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (rp.remote_name, ))
+                        return
+                    is_remote_conflict = diff_result["conflict"]
+                    assert diff_result["diffversion"] == 1
+                    diff = diff_result["diff"]
+                    current_remote_rev = diff_result["current"]
             else:
                 current_remote_rev = remote_rev
                 if rp.local_mime_type == MIMETYPE_MOIN:
@@ -269,21 +302,24 @@
             # and the page has never been syncronised
             if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body())
                 and (remote_rev is None or is_remote_conflict)):
-                self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename})
-                continue
+                self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, ))
+                return
 
             if remote_rev is None and direction == BOTH:
                 self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki."))
 
-            if diff is None:
+            if rp.remote_deleted:
+                new_contents = ""
+            elif diff is None:
                 new_contents = old_contents
             else:
                 new_contents = patch(patch_base_contents, decompress(diff))
 
             if rp.local_mime_type == MIMETYPE_MOIN:
                 new_contents_unicode = new_contents.decode("utf-8")
-                # here, the actual merge happens
-                # XXX print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body())
+                # here, the actual 3-way merge happens
+                if debug:
+                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body()))
                 verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers)
                 verynewtext_raw = verynewtext.encode("utf-8")
             else:
@@ -293,13 +329,12 @@
                     verynewtext_raw = current_page.get_raw_body_str()
 
             diff = textdiff(new_contents, verynewtext_raw)
-            #print "Diff against %r" % new_contents.encode("utf-8")
-
-            comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
+            if debug:
+                self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % new_contents)
 
             # XXX upgrade to write lock
             try:
-                current_page.saveText(verynewtext, current_rev, comment=comment) # YYY direct access
+                current_page.saveText(verynewtext, rp.local_rev, comment=comment) # YYY direct access
             except PageEditor.Unchanged:
                 pass
             except PageEditor.EditConflict:
@@ -324,6 +359,9 @@
 
             # XXX release lock
 
+        for rp in m_pages:
+            handle_page(rp)
+
 
 def execute(pagename, request):
     ActionClass(pagename, request).render()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/macro/GetText2.py	Sun Aug 20 13:51:01 2006 +0200
@@ -0,0 +1,32 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - Load I18N Text and substitute data.
+
+    This macro has the main purpose of being used by extensions that write
+    data to wiki pages but want to ensure that it is properly translated.
+
+    @copyright: 2006 by MoinMoin:AlexanderSchremmer
+    @license: GNU GPL, see COPYING for details.
+"""
+
+from MoinMoin import wikiutil
+from MoinMoin.packages import unpackLine
+
+Dependencies = ["language"]
+
+def execute(macro, args):
+    """ args consists of a character specifying the separator and then a
+    packLine sequence describing a list. The first element of it is the message
+    and the remaining elements are substituted in the message using string
+    substitution.
+    """
+    sep = args[0]
+    args = unpackLine(args[1:], sep)
+    if args:
+        translation = macro.request.getText(args[0], formatted=False)
+    else:
+        translation = u""
+    message = translation % tuple(args[1:])
+
+    return macro.formatter.text(message)
+
--- a/MoinMoin/mail/mailimport.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/mail/mailimport.py	Sun Aug 20 13:51:01 2006 +0200
@@ -269,6 +269,10 @@
             elif table_ends is not None and not line.startswith("||"):
                 break
 
+        # in order to let the gettext system recognise the [[GetText]] calls used below,
+        # we must repeat them here:
+        [_("From"), _("To"), _("Content"), _("Date"), _("Attachments")]
+
         table_header = (u"\n\n## mail_overview (don't delete this line)\n" +
                         u"|| '''[[GetText(From)]] ''' || '''[[GetText(To)]] ''' || '''[[GetText(Content)]] ''' || '''[[GetText(Date)]] ''' || '''[[GetText(Attachments)]] ''' ||\n"
                        )
--- a/MoinMoin/server/standalone.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/server/standalone.py	Sun Aug 20 13:51:01 2006 +0200
@@ -293,8 +293,9 @@
         request is not available at this time.  Should be fixed by
         having url_prefix_static in a server config.
         """
-        if self.path.startswith('/moin_static160/'): # XXX
-            self.path = self.path[5:]
+        PREFIX = '/moin_static160/'
+        if self.path.startswith(PREFIX): # XXX
+            self.path = self.path[len(PREFIX)-1:]
             self.serve_static_file()
         elif self.path in ['/favicon.ico', '/robots.txt']:
             self.serve_static_file()
--- a/MoinMoin/wikisync.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/wikisync.py	Sun Aug 20 13:51:01 2006 +0200
@@ -18,6 +18,7 @@
 from MoinMoin import wikiutil
 from MoinMoin.util import lock
 from MoinMoin.Page import Page
+from MoinMoin.PageEditor import PageEditor
 from MoinMoin.packages import unpackLine, packLine
 
 
@@ -64,9 +65,10 @@
         self.remote_deleted = remote_deleted
         self.local_mime_type = MIMETYPE_MOIN   # XXX no usable storage API yet
         self.remote_mime_type = MIMETYPE_MOIN
+        assert remote_rev != 99999999
 
     def __repr__(self):
-        return repr("<Remote Page %r>" % unicode(self))
+        return repr("<Sync Page %r>" % unicode(self))
 
     def __unicode__(self):
         return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev)
@@ -94,7 +96,7 @@
         elif self.remote_name is None:
             n_name = normalise_pagename(self.local_name, local.prefix)
             assert n_name is not None
-            self.remote_name = (local.prefix or "") + n_name
+            self.remote_name = (remote.prefix or "") + n_name
 
         return self # makes using list comps easier
 
@@ -132,26 +134,6 @@
         """ Is true if the page is in both wikis. """
         return self.local_rev and self.remote_rev
 
-    def iter_local_only(cls, sp_list):
-        """ Iterates over all pages that are local only. """
-        for x in sp_list:
-            if x.is_only_local():
-                yield x
-    iter_local_only = classmethod(iter_local_only)
-
-    def iter_remote_only(cls, sp_list):
-        """ Iterates over all pages that are remote only. """
-        for x in sp_list:
-            if x.is_only_remote():
-                yield x
-    iter_remote_only = classmethod(iter_remote_only)
-
-    def iter_local_and_remote(cls, sp_list):
-        """ Iterates over all pages that are local and remote. """
-        for x in sp_list:
-            if x.is_local_and_remote():
-                yield x
-    iter_local_and_remote = classmethod(iter_local_and_remote)
 
 class RemoteWiki(object):
     """ This class should be the base for all implementations of remote wiki
@@ -173,13 +155,18 @@
         """ Returns a list of SyncPage instances. """
         return NotImplemented
 
+    def delete_page(self, pagename):
+        """ Deletes the page called pagename. """
+        return NotImplemented
+
 
 class MoinRemoteWiki(RemoteWiki):
     """ Used for MoinMoin wikis reachable via XMLRPC. """
-    def __init__(self, request, interwikiname, prefix, pagelist):
+    def __init__(self, request, interwikiname, prefix, pagelist, verbose=False):
         self.request = request
         self.prefix = prefix
         self.pagelist = pagelist
+        self.verbose = verbose
         _ = self.request.getText
 
         wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, ))
@@ -211,7 +198,7 @@
             self.iwid_full = packLine([remote_iwid, interwikiname])
 
     def createConnection(self):
-        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
+        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=self.verbose)
 
     # Public methods
     def get_diff(self, pagename, from_rev, to_rev, n_name=None):
@@ -231,6 +218,9 @@
         result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
         return result
 
+    def delete_page(self, pagename):
+        return # XXX not implemented yet
+
     # Methods implementing the RemoteWiki interface
     def get_interwiki_name(self):
         return self.remote_interwikiname
@@ -252,7 +242,10 @@
             normalised_name = normalise_pagename(name, self.prefix)
             if normalised_name is None:
                 continue
-            rpages.append(SyncPage(normalised_name, remote_rev=abs(revno), remote_name=name, remote_deleted=revno < 0))
+            if abs(revno) != 99999999: # I love sane in-band signalling
+                remote_rev = abs(revno)
+                remote_deleted = revno < 0
+                rpages.append(SyncPage(normalised_name, remote_rev=remote_rev, remote_name=name, remote_deleted=remote_deleted))
         return rpages
 
     def __repr__(self):
@@ -275,16 +268,27 @@
 
     def createSyncPage(self, page_name):
         normalised_name = normalise_pagename(page_name, self.prefix)
+        if normalised_name is None:
+            return None
         if not self.request.user.may.write(normalised_name):
             return None
-        if normalised_name is None:
+        page = Page(self.request, page_name)
+        revno = page.get_real_rev()
+        if revno == 99999999: # I love sane in-band signalling
             return None
-        page = Page(self.request, page_name)
-        return SyncPage(normalised_name, local_rev=page.get_real_rev(), local_name=page_name, local_deleted=not page.exists())
+        return SyncPage(normalised_name, local_rev=revno, local_name=page_name, local_deleted=not page.exists())
 
     # Public methods:
 
     # Methods implementing the RemoteWiki interface
+    def delete_page(self, page_name, comment):
+        page = PageEditor(self.request, page_name)
+        try:
+            page.deletePage(comment)
+        except PageEditor.AccessDenied, (msg, ):
+            return msg
+        return ""
+
     def get_interwiki_name(self):
         return self.request.cfg.interwikiname
 
@@ -338,7 +342,7 @@
         self.normalised_name = normalised_name
 
     def __repr__(self):
-        return u"<Tag normalised_pagename=%r remote_wiki=%r remote_rev=%r current_rev=%r>" % (self.normalised_name, self.remote_wiki, self.remote_rev, self.current_rev)
+        return u"<Tag normalised_pagename=%r remote_wiki=%r remote_rev=%r current_rev=%r>" % (getattr(self, "normalised_name", "UNDEF"), self.remote_wiki, self.remote_rev, self.current_rev)
 
     def __cmp__(self, other):
         if not isinstance(other, Tag):
--- a/MoinMoin/xmlrpc/__init__.py	Sun Aug 20 13:50:27 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Sun Aug 20 13:51:01 2006 +0200
@@ -706,11 +706,12 @@
         """
         from MoinMoin.util.bdiff import decompress, patch
         from MoinMoin.wikisync import TagStore, BOTH
+        from MoinMoin.packages import unpackLine
         LASTREV_INVALID = xmlrpclib.Fault("LASTREV_INVALID", "The page was changed")
 
         pagename = self._instr(pagename)
 
-        comment = u"Remote Merge - %r" % interwiki_name
+        comment = u"Remote Merge - %r" % unpackLine(interwiki_name)[-1]
         
         # User may read page?
         if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename):
@@ -721,14 +722,14 @@
         # current version of the page
         currentpage = PageEditor(self.request, pagename, do_editor_backup=0)
 
-        if currentpage.get_real_rev() != last_remote_rev:
+        if last_remote_rev is not None and currentpage.get_real_rev() != last_remote_rev:
             return LASTREV_INVALID
 
         if not currentpage.exists() and diff is None:
             return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.")
 
         # base revision used for the diff
-        basepage = Page(self.request, pagename, rev=delta_remote_rev)
+        basepage = Page(self.request, pagename, rev=(delta_remote_rev or 0))
 
         # generate the new page revision by applying the diff
         newcontents = patch(basepage.get_raw_body_str(), decompress(str(diff)))
@@ -736,7 +737,7 @@
 
         # write page
         try:
-            currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev, comment=comment)
+            currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev or 0, comment=comment)
         except PageEditor.Unchanged: # could happen in case of both wiki's pages being equal
             pass
         except PageEditor.EditConflict:
--- a/docs/CHANGES.aschremmer	Sun Aug 20 13:50:27 2006 +0200
+++ b/docs/CHANGES.aschremmer	Sun Aug 20 13:51:01 2006 +0200
@@ -2,20 +2,21 @@
 ===============================
 
   Known main issues:
-    * How to handle renames/deletes?
+    * Do I need to tag delete operations?
+    * How to handle renames?
     * How will we store tags? (Metadata support would be handy)
       (currently done in Pickle files)
 
   ToDo:
-    * Implement all syncronisation cases (all 3 page sets).
+    * Delete remote pages.
     * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
-    * Attach the status information to the job page.
     * Check what needs to be documented on MoinMaster.
 
     * Show tags in an action=info view?
 
     * Test with prefixes
     * Search for XXX
+    * Delete iters?
     * Maybe refactor YYY into MoinLocalWiki
     * Remove amount of "very" in the code
     * Clean up trailing whitespace.
@@ -26,6 +27,7 @@
     * Do older tags of one wiki site have to be stored as well? Why don't we
       keep just one tag?
     * Put author names into the comment field, transmit mimetypes.
+    * Implement renamed pages.
 
   New Features:
     * XMLRPC method to return the Moin version
@@ -44,6 +46,7 @@
     * SyncPages action
     * XMLRPC functions may return Fault instances
     * diff3 algorithm extended, a new mode should reduce the conflicts
+    * GetText2 macro
 
   Bugfixes (only stuff that is buggy in moin/1.6 main branch):
     * Conflict resolution fixes. (merged into main)
@@ -114,6 +117,8 @@
          normalised name. Implemented preliminary mime type support, only transmission of the mime type
          and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :).
          Added infrastructure support for detecting deleted pages (not used in the merging logic yet).
+         Reworked i18n and logging support. Added GetText2 macro. Got syncing working for
+         pages that exist on only one side. Working synchronisation of deleted pages.
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress