changeset 1378:fa0b7d2d998b

merge with main
author Franz Pletz <fpletz AT franz-pletz DOT org>
date Sun, 20 Aug 2006 02:05:35 +0200
parents bb37beca7545 (current diff) 5ffa0d1ae026 (diff)
children 22526c2fd5b4 6b0ea72d7665
files MoinMoin/_tests/test_newlocking.py MoinMoin/search/results.py
diffstat 29 files changed, 283 insertions(+), 260 deletions(-)
--- a/MoinMoin/PageEditor.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/PageEditor.py	Sun Aug 20 02:05:35 2006 +0200
@@ -509,6 +509,11 @@
         """
         _ = self._
         success = True
+        if not (self.request.user.may.write(self.page_name)
+                and self.request.user.may.delete(self.page_name)):
+            msg = _('You are not allowed to delete this page!')
+            raise self.AccessDenied, msg
+
         try:
             # First save a final backup copy of the current page
             # (recreating the page allows access to the backups again)
@@ -831,14 +836,11 @@
         
         if not os.path.exists(pagedir): # new page, create and init pagedir
             os.mkdir(pagedir)
-            os.chmod(pagedir, 0777 & config.umask)
         if not os.path.exists(revdir):        
             os.mkdir(revdir)
-            os.chmod(revdir, 0777 & config.umask)
             f = open(cfn, 'w')
             f.write('%08d\n' % 0)
             f.close()
-            os.chmod(cfn, 0666 & config.umask)
             
         got_lock = False
         retry = 0
@@ -878,7 +880,6 @@
                 # Write the file using text/* mime type
                 f.write(self.encodeTextMimeType(text))
                 f.close()
-                os.chmod(pagefile, 0666 & config.umask)
                 mtime_usecs = wikiutil.timestamp2version(os.path.getmtime(pagefile))
                 # set in-memory content
                 self.set_raw_body(text)
--- a/MoinMoin/PageGraphicalEditor.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/PageGraphicalEditor.py	Sun Aug 20 02:05:35 2006 +0200
@@ -249,13 +249,13 @@
         lang = self.language or self.request.cfg.language_default
         contentlangdirection = i18n.getDirection(lang) # 'ltr' or 'rtl'
         uilanguage = self.request.lang
-        url_prefix = self.request.cfg.url_prefix
+        url_prefix_static = self.request.cfg.url_prefix_static
         wikipage = wikiutil.quoteWikinameURL(self.page_name)
-        fckbasepath = url_prefix + '/applets/FCKeditor'
+        fckbasepath = url_prefix_static + '/applets/FCKeditor'
         wikiurl = request.getScriptname()
         if not wikiurl or wikiurl[-1] != '/':
             wikiurl += '/'
-        themepath = '%s/%s' % (url_prefix, self.request.theme.name)
+        themepath = '%s/%s' % (url_prefix_static, self.request.theme.name)
         smileypath = themepath + '/img'
         # auto-generating a list for SmileyImages does NOT work from here!
         editor_size = int(request.user.edit_rows) * 22 # 22 height_pixels/line
@@ -269,8 +269,8 @@
     oFCKeditor.BasePath= '%(fckbasepath)s/' ;
     oFCKeditor.Config['WikiBasePath'] = '%(wikiurl)s' ;
     oFCKeditor.Config['WikiPage'] = '%(wikipage)s' ;
-    oFCKeditor.Config['PluginsPath'] = '%(url_prefix)s/applets/moinFCKplugins/' ;
-    oFCKeditor.Config['CustomConfigurationsPath'] = '%(url_prefix)s/applets/moinfckconfig.js'  ;
+    oFCKeditor.Config['PluginsPath'] = '%(url_prefix_static)s/applets/moinFCKplugins/' ;
+    oFCKeditor.Config['CustomConfigurationsPath'] = '%(url_prefix_static)s/applets/moinfckconfig.js'  ;
     oFCKeditor.Config['WordRule'] = %(word_rule)s ;
     oFCKeditor.Config['SmileyPath'] = '%(smileypath)s/' ;
     oFCKeditor.Config['EditorAreaCSS'] = '%(themepath)s/css/common.css' ;
--- a/MoinMoin/_tests/test_newlocking.py	Sun Aug 20 00:14:25 2006 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    MoinMoin - MoinMoin test for new style "locking" (== mostly avoid locking)
-
-    The idea is to not have to lock files when we just want to read them.
-    When we never overwrite file content with new stuff, locking is not needed.
-    We can just write the new content into a new file (with tmpfname) and then
-    rename it to the original filename. Files that opened the original filename
-    before the rename will still read old content after the rename (until they
-    are closed).
-
-    @copyright: 2006 by Thomas Waldmann (idea: Bastian Blank)
-    @license: GNU GPL, see COPYING for details.
-"""
-
-import unittest, tempfile, os, shutil
-from MoinMoin._tests import TestConfig, TestSkipped
-
-def rename(oldname, newname):
-    """ Multiplatform rename
-
-    Move to MoinMoin.util.filesys when done.
-
-    TODO:
-    Test/Fix win32 stuff.
-    
-    Check: MoveFileEx: If the new filename is None, it deletes the file (needs very recent pywin32 binding).
-           This is documented for the "on reboot" stuff, does this also work when not doing it on next reboot?
-           Maybe we can use this at another place.
-           
-    API doc: http://msdn.microsoft.com/library/default.asp?url=/library/en-us/fileio/fs/movefileex.asp
-    
-    Windows 95/98/ME do not implement MoveFileEx().
-    Either have some other working code or document we drop support for less-than-NT.
-    Document pywin32 extension dependency.
-
-    """
-    # this nt specific code should be replaced by better stuff
-    if os.name == 'nt':
-        # uses mark hammond's pywin32 extension
-        # there seems to be also stuff in win32api.MoveFileEx and win32con.MOVEFILE_REPLACE_EXISTING
-        # what's the difference to them in win32file?
-        from win32file import MoveFileEx, MOVEFILE_REPLACE_EXISTING
-        ret = MoveFileEx(oldname, newname, MOVEFILE_REPLACE_EXISTING)
-        # If the function succeeds, the return value is nonzero.
-        # If the function fails, the return value is 0 (zero). To get extended error information, call GetLastError.
-        if ret == 0:
-            raise OSError # emulate os.rename behaviour
-    else:
-        os.rename(oldname, newname) # rename has no return value, but raises OSError in case of failure
-
-
-class NewLockTests(unittest.TestCase):
-
-    def setUp(self):
-        self.test_dir = tempfile.mkdtemp('', 'lock_')
-
-    def tearDown(self):
-        shutil.rmtree(self.test_dir)
-
-    def testNoLockingForReading(self):
-        """ new locking: NoLockingForReading tests if files still work when filename is target of a rename """
-        fname = os.path.join(self.test_dir, 'readtest')
-        tmpfname = os.path.join(self.test_dir, '__readtest')
-        origdata = "precious content"
-        newdata = "new content"
-        f = file(fname, "w") ; f.write(origdata) ; f.close()
-        f = file(fname, "r")
-        ftmp = file(tmpfname, "w") ; ftmp.write(newdata) ; ftmp.close()
-        rename(tmpfname, fname)
-        read1data = f.read() ; f.close() # we should still get origdata here!
-        f = file(fname, "r") ; read2data = f.read() ; f.close() # we should newdata now.
-        self.failUnless(origdata == read1data and newdata == read2data, "got wrong data when reading")
-
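The test removed above documents the idea behind the "new locking" scheme: writers never modify a file in place; they write new content to a temporary file and rename it over the target, so readers need no lock, and a reader that already opened the old file keeps seeing the old content until it closes it (elsewhere in this changeset, caching.py calls filesys.rename for exactly this). A minimal sketch of the pattern, with hypothetical helper name and temp-file suffix:

    import os

    def atomic_write(fname, data):
        # write into a temp file next to the target, then rename it over the
        # original; on POSIX the rename atomically replaces the old file, and
        # readers holding the old file object still read the old content
        tmpfname = fname + '.__new__'
        f = open(tmpfname, 'w')
        try:
            f.write(data)
        finally:
            f.close()
        os.rename(tmpfname, fname)  # win32 needs a MoveFileEx-style replace instead
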
--- a/MoinMoin/_tests/test_packages.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/_tests/test_packages.py	Sun Aug 20 02:05:35 2006 +0200
@@ -21,13 +21,11 @@
         self.script = script or u"""moinmoinpackage|1
 print|foo
 ReplaceUnderlay|testdatei|TestSeite2
-DeletePage|TestSeite2|Test ...
 IgnoreExceptions|True
 DeletePage|TestSeiteDoesNotExist|Test ...
 IgnoreExceptions|False
 AddRevision|foofile|FooPage
 AddRevision|foofile|FooPage
-DeletePage|FooPage|Test ...
 setthemename|foo
 #foobar
 installplugin|foo|local|parser|testy
@@ -60,7 +58,6 @@
         testseite2 = Page(self.request, 'TestSeite2')
         self.assertEqual(testseite2.getPageText(), "Hello world, I am the file testdatei")
         self.assert_(testseite2.isUnderlayPage())
-        self.assert_(not Page(self.request, 'FooPage').exists())
 
 class QuotingTestCase(TestCase):
     def testQuoting(self):
--- a/MoinMoin/_tests/test_wikisync.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/_tests/test_wikisync.py	Sun Aug 20 02:05:35 2006 +0200
@@ -29,6 +29,7 @@
         self.assert_(not tags.get_all_tags())
         tags.add(remote_wiki="foo", remote_rev=1, current_rev=2, direction=BOTH, normalised_name="FrontPage")
         tags = TagStore(self.page) # reload
+        dummy = repr(tags.get_all_tags()) # this should not raise
         self.assert_(tags.get_all_tags()[0].remote_rev == 1)
     
     def tearDown(self):
--- a/MoinMoin/action/AttachFile.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/action/AttachFile.py	Sun Aug 20 02:05:35 2006 +0200
@@ -189,7 +189,6 @@
             stream.write(filecontent)
         finally:
             stream.close()
-        os.chmod(fpath, 0666 & config.umask)
 
         _addLogEntry(request, 'ATTNEW', pagename, target)
 
@@ -380,7 +379,7 @@
     _ = request.getText
 
     now = time.time()
-    pubpath = request.cfg.url_prefix + "/applets/TWikiDrawPlugin"
+    pubpath = request.cfg.url_prefix_static + "/applets/TWikiDrawPlugin"
     basename = request.form['drawing'][0]
     drawpath = getAttachUrl(pagename, basename + '.draw', request, escaped=1)
     pngpath = getAttachUrl(pagename, basename + '.png', request, escaped=1)
@@ -625,7 +624,6 @@
             stream.write(filecontent)
         finally:
             stream.close()
-        os.chmod(savepath, 0666 & config.umask)
 
     # touch attachment directory to invalidate cache if new map is saved
     if ext == '.map':
@@ -760,7 +758,6 @@
                                     os.unlink(new_file)
                                 else:
                                     valid_name = True
-                                    os.chmod(new_file, 0666 & config.umask)
                                     _addLogEntry(request, 'ATTNEW', pagename, finalname)
 
                 if valid_name:
--- a/MoinMoin/action/DeletePage.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/action/DeletePage.py	Sun Aug 20 02:05:35 2006 +0200
@@ -25,6 +25,8 @@
         self.form_trigger_label = _('Delete')
 
     def is_allowed(self):
+        # this is not strictly necessary because the underlying storage code checks
+        # as well
         may = self.request.user.may
         return may.write(self.pagename) and may.delete(self.pagename)
 
--- a/MoinMoin/action/PackagePages.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/action/PackagePages.py	Sun Aug 20 02:05:35 2006 +0200
@@ -113,8 +113,6 @@
         zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
         zf.close()
 
-        os.chmod(fpath, 0666 & config.umask)
-
         _addLogEntry(self.request, 'ATTNEW', self.pagename, target)
 
         raise ActionError(_("Created the package %s containing the pages %s.") % (wikiutil.escape(target), wikiutil.escape(pagelist)))
--- a/MoinMoin/action/SyncPages.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Sun Aug 20 02:05:35 2006 +0200
@@ -30,6 +30,9 @@
 from MoinMoin.util import diff3
 
 
+debug = True
+
+
 # map sync directions
 directions_map = {"up": UP, "down": DOWN, "both": BOTH}
 
@@ -37,19 +40,32 @@
 class ActionStatus(Exception): pass
 
 
-class ActionClass:
-    INFO, WARN, ERROR = range(3) # used for logging
+class ActionClass(object):
+    INFO, WARN, ERROR = zip(range(3), ("", "<!>", "/!\\")) # used for logging
 
     def __init__(self, pagename, request):
         self.request = request
         self.pagename = pagename
-        self.page = Page(request, pagename)
+        self.page = PageEditor(request, pagename)
         self.status = []
         request.flush()
 
-    def log_status(self, level, message):
+    def log_status(self, level, message="", substitutions=(), raw_suffix=""):
         """ Appends the message with a given importance level to the internal log. """
-        self.status.append((level, message))
+        self.status.append((level, message, substitutions, raw_suffix))
+
+    def generate_log_table(self):
+        """ Transforms self.status into a user readable table. """
+        table_line = u"|| %(smiley)s || %(message)s%(raw_suffix)s ||"
+        table = []
+
+        for line in self.status:
+            macro_args = [line[1]] + list(line[2])
+            table.append(table_line % {"smiley": line[0][1], "message":
+                macro_args and u"[[GetText2(|%s)]]" % (packLine(macro_args), ),
+                "raw_suffix": line[3]})
+
+        return "\n".join(table)
 
     def parse_page(self):
         """ Parses the parameter page and returns the read arguments. """
@@ -102,6 +118,7 @@
 
         params = self.fix_params(self.parse_page())
 
+        # XXX aquire readlock on self.page
         try:
             if params["direction"] == UP:
                 raise ActionStatus(_("The only supported directions are BOTH and DOWN."))
@@ -114,7 +131,7 @@
 
             local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"])
             try:
-                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"])
+                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], verbose=debug)
             except UnsupportedWikiException, (msg, ):
                 raise ActionStatus(msg)
 
@@ -123,11 +140,13 @@
 
             self.sync(params, local, remote)
         except ActionStatus, e:
-            msg = u'<p class="error">%s</p><p>%s</p>\n' % (e.args[0], repr(self.status))
+            msg = u'<p class="error">%s</p>\n' % (e.args[0], )
         else:
-            msg = u"%s<p>%s</p>" % (_("Syncronisation finished."), repr(self.status))
+            msg = u"%s" % (_("Syncronisation finished."), )
 
-        # XXX append self.status to the job page
+        self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0)
+        # XXX release readlock on self.page
+
         return self.page.send_page(self.request, msg=msg)
     
     def sync(self, params, local, remote):
@@ -137,14 +156,6 @@
             Now there are a few other cases left that have to be implemented:
                 Wiki A    | Wiki B   | Remark
                 ----------+----------+------------------------------
-                exists    | deleted  | In this case, we do a normal merge if there
-                          |          | are no tags. If there were changes in
-                          |          | Wiki A, there is a merge with a conflict.
-                          |          | Otherwise (no changes past last merge),
-                          |          | the page is deleted in Wiki A.
-                          |          | This needs static info that could be
-                          |          | transferred with the pagelist.
-                ----------+----------+-------------------------------
                 exists    | non-     | Now the wiki knows that the page was renamed.
                 with tags | existant | There should be an RPC method that asks
                           |          | for the new name (which could be recorded
@@ -161,15 +172,23 @@
                           |          | do a sync without considering tags
                 with tags | with non | to ensure data integrity.
                           | matching | Hmm, how do we detect this
-                          | tags     | case if the unmatching tags are only on the remote side?
+                          | tags     | case if the unmatching tags are only
+                          |          | on the remote side?
                 ----------+----------+-------------------------------
-                exists    | exists   | already handled.
         """
-        _ = self.request.getText
+        _ = lambda x: x # we will translate it later
+
         direction = params["direction"]
+        if direction == BOTH:
+            match_direction = direction
+        else:
+            match_direction = None
+
         local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
         remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()])
 
+        self.log_status(self.INFO, _("Syncronisation started -"), raw_suffix=" [[DateTime(%s)]]" % self.page._get_local_timestamp())
+
         l_pages = local.get_pages()
         r_pages = remote.get_pages(exclude_non_writable=direction != DOWN)
 
@@ -180,39 +199,28 @@
 
         m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)]
 
-        self.log_status(self.INFO, "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))) # XXX remove?
+        self.log_status(self.INFO, _("Got a list of %s local and %s remote pages. This results in %s different pages over-all."),
+                        (str(len(l_pages)), str(len(r_pages)), str(len(m_pages))))
 
         if params["pageMatch"]:
             m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
-        self.log_status(self.INFO, "After filtering: Got %i merges pages" % (len(m_pages), )) # XXX remove
+            self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), ))
 
-        on_both_sides = list(SyncPage.iter_local_and_remote(m_pages))
-        remote_but_not_local = list(SyncPage.iter_remote_only(m_pages))
-        local_but_not_remote = list(SyncPage.iter_local_only(m_pages))
-        
-        # some initial test code (XXX remove)
-        #r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local])
-        #l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote])
-        #raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "<br>These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages))
-
-        # let's do the simple case first, can be refactored later to match all cases
-        # XXX handle deleted pages
-        for rp in on_both_sides:
+        def handle_page(rp):
             # XXX add locking, acquire read-lock on rp
-            #print "Processing %r" % rp
+            if debug:
+                self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % rp)
 
             local_pagename = rp.local_name
             current_page = PageEditor(self.request, local_pagename) # YYY direct access
-            current_rev = current_page.get_real_rev()
+            comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
 
             tags = TagStore(current_page)
-            if direction == BOTH:
-                match_direction = direction
-            else:
-                match_direction = None
-            matching_tags = tags.fetch(iwid_full=remote.iwid_full,direction=match_direction)
+
+            matching_tags = tags.fetch(iwid_full=remote.iwid_full, direction=match_direction)
             matching_tags.sort()
-            #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags)
+            if debug:
+                self.log_status(ActionClass.INFO, raw_suffix="Tags: %r [[BR]] All: %r" % (matching_tags, tags.tags))
 
             # some default values for non matching tags
             normalised_name = None
@@ -222,25 +230,44 @@
 
             if matching_tags:
                 newest_tag = matching_tags[-1]
-                
+
+                local_change = newest_tag.current_rev != rp.local_rev
+                remote_change = newest_tag.remote_rev != rp.remote_rev
+
                 # handle some cases where we cannot continue for this page
-                if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev):
-                    continue # no changes done, next page
-                if rp.local_mime_type != MIMETYPE_MOIN and not (newest_tag.remote_rev == rp.remote_rev ^ newest_tag.current_rev == current_rev):
-                    self.log_status(ActionClass.WARN, _("The item %(pagename)s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again.") % {"pagename": rp.name})
-                    continue
+                if not remote_change and (direction == DOWN or not local_change):
+                    return # no changes done, next page
+                if rp.local_deleted and rp.remote_deleted:
+                    return
+                if rp.remote_deleted and not local_change:
+                    msg = local.delete_page(rp.local_name, comment)
+                    if not msg:
+                        self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (rp.name, ))
+                    else:
+                        self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (rp.name, ), msg)
+                    return
+                if rp.local_deleted and not remote_change:
+                    if direction == DOWN:
+                        return
+                    self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r remotely" % rp) # XXX add
+                    msg = remote.delete_page(rp.remote_name)
+                    self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (rp.name, ))
+                    return
+                if rp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change):
+                    self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, ))
+                    return
                 if rp.local_mime_type != rp.remote_mime_type:
-                    self.log_status(ActionClass.WARN, _("The item %(pagename)s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again.") % {"pagename": rp.name})
-                    continue
+                    self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (rp.name, ))
+                    return
                 if newest_tag.normalised_name != rp.name:
-                    self.log_status(ActionClass.WARN, _("The item %(pagename)s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page.") % {"pagename": rp.name}) # XXX implement renames
+                    self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames
                 else:
                     normalised_name = newest_tag.normalised_name
                     local_rev = newest_tag.current_rev
                     remote_rev = newest_tag.remote_rev
                     old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access
 
-            self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name})
+            self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, rp.remote_name))
 
             if direction == DOWN:
                 remote_rev = None # always fetch the full page, ignore remote conflict check
@@ -249,14 +276,20 @@
                 patch_base_contents = old_contents
 
             if remote_rev != rp.remote_rev:
-                diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name)
-                if diff_result is None:
-                    self.log_status(ActionClass.ERROR, _("The page %(pagename)s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced.") % {"pagename": rp.remote_name})
-                    continue
-                is_remote_conflict = diff_result["conflict"]
-                assert diff_result["diffversion"] == 1
-                diff = diff_result["diff"]
-                current_remote_rev = diff_result["current"]
+                if rp.remote_deleted: # ignore remote changes
+                    current_remote_rev = rp.remote_rev
+                    is_remote_conflict = False
+                    diff = None
+                    self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (rp.name, ))
+                else:
+                    diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name)
+                    if diff_result is None:
+                        self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (rp.remote_name, ))
+                        return
+                    is_remote_conflict = diff_result["conflict"]
+                    assert diff_result["diffversion"] == 1
+                    diff = diff_result["diff"]
+                    current_remote_rev = diff_result["current"]
             else:
                 current_remote_rev = remote_rev
                 if rp.local_mime_type == MIMETYPE_MOIN:
@@ -269,21 +302,24 @@
             # and the page has never been syncronised
             if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body())
                 and (remote_rev is None or is_remote_conflict)):
-                self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename})
-                continue
+                self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, ))
+                return
 
             if remote_rev is None and direction == BOTH:
                 self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki."))
 
-            if diff is None:
+            if rp.remote_deleted:
+                new_contents = ""
+            elif diff is None:
                 new_contents = old_contents
             else:
                 new_contents = patch(patch_base_contents, decompress(diff))
 
             if rp.local_mime_type == MIMETYPE_MOIN:
                 new_contents_unicode = new_contents.decode("utf-8")
-                # here, the actual merge happens
-                # XXX print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body())
+                # here, the actual 3-way merge happens
+                if debug:
+                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body()))
                 verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers)
                 verynewtext_raw = verynewtext.encode("utf-8")
             else:
@@ -293,13 +329,12 @@
                     verynewtext_raw = current_page.get_raw_body_str()
 
             diff = textdiff(new_contents, verynewtext_raw)
-            #print "Diff against %r" % new_contents.encode("utf-8")
-
-            comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
+            if debug:
+                self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % new_contents)
 
             # XXX upgrade to write lock
             try:
-                current_page.saveText(verynewtext, current_rev, comment=comment) # YYY direct access
+                current_page.saveText(verynewtext, rp.local_rev, comment=comment) # YYY direct access
             except PageEditor.Unchanged:
                 pass
             except PageEditor.EditConflict:
@@ -324,6 +359,9 @@
 
             # XXX release lock
 
+        for rp in m_pages:
+            handle_page(rp)
+
 
 def execute(pagename, request):
     ActionClass(pagename, request).render()
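SyncPages now collects its status as (level, message, substitutions, raw_suffix) tuples and renders them into a wiki table whose message cells call the new GetText2 macro, so the text gets translated when the job page is viewed rather than when it is written. A rough sketch of that transformation (packLine is stood in by a plain join; the real MoinMoin.packages.packLine also escapes separator characters):

    def packLine(items, separator=u"|"):
        # simplified stand-in for MoinMoin.packages.packLine
        return separator.join(items)

    INFO, WARN, ERROR = zip(range(3), ("", "<!>", "/!\\"))  # (level, smiley) pairs

    status = [(INFO, u"After filtering: %s pages", (u"3", ), u"")]

    table = []
    for level, message, substitutions, raw_suffix in status:
        macro_args = [message] + list(substitutions)
        table.append(u"|| %s || [[GetText2(|%s)]]%s ||" % (
            level[1], packLine(macro_args), raw_suffix))
    # -> ||  || [[GetText2(|After filtering: %s pages|3)]] ||
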
--- a/MoinMoin/action/fckdialog.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/action/fckdialog.py	Sun Aug 20 02:05:35 2006 +0200
@@ -87,7 +87,7 @@
        <td valign="top">
        <span fckLang="MacroDlgName">Macro Name</span><br>
        <select id="txtName" size="10" onchange="OnChange(this.value);">
-''' % request.cfg.url_prefix)
+''' % request.cfg.url_prefix_static)
 
     macros = []
     for macro in macro_list(request):
@@ -240,7 +240,7 @@
         ['<option value="%s">%s</option>' % (key, key) for key in resultlist])
 
     # wiki url
-    url_prefix = request.cfg.url_prefix
+    url_prefix_static = request.cfg.url_prefix_static
     scriptname = request.getScriptname()
     if not scriptname or scriptname[-1] != "/":
         scriptname += "/"
@@ -275,9 +275,9 @@
   <title>Link Properties</title>
   <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
   <meta name="robots" content="noindex,nofollow" />
-  <script src="%(url_prefix)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
-  <script src="%(url_prefix)s/applets/moinFCKplugins/moinlink/fck_link.js" type="text/javascript"></script>
-  <script src="%(url_prefix)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinlink/fck_link.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
  </head>
  <body scroll="no" style="OVERFLOW: hidden">
   <div id="divInfo" style="DISPLAY: none">
@@ -392,7 +392,7 @@
         page_list = ""
 
     # wiki url
-    url_prefix = request.cfg.url_prefix
+    url_prefix_static = request.cfg.url_prefix_static
     scriptname = request.getScriptname()
     if not scriptname or scriptname[-1] != "/":
         scriptname += "/"
@@ -426,9 +426,9 @@
   <title>Attachment Properties</title>
   <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
   <meta name="robots" content="noindex,nofollow" />
-  <script src="%(url_prefix)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
-  <script src="%(url_prefix)s/applets/moinFCKplugins/moinattachment/fck_attachment.js" type="text/javascript"></script>
-  <script src="%(url_prefix)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinattachment/fck_attachment.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
  </head>
  <body scroll="no" style="OVERFLOW: hidden">
   <div id="divInfo">
@@ -463,7 +463,7 @@
 
 def image_dialog(request):
     request.emit_http_headers()
-    url_prefix = request.cfg.url_prefix
+    url_prefix_static = request.cfg.url_prefix_static
     request.write('''
 <!--
  * FCKeditor - The text editor for internet
@@ -485,9 +485,9 @@
   <title>Link Properties</title>
   <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
   <meta name="robots" content="noindex,nofollow" />
-  <script src="%(url_prefix)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
-  <script src="%(url_prefix)s/applets/moinFCKplugins/moinimage/fck_image.js" type="text/javascript"></script>
-  <script src="%(url_prefix)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinimage/fck_image.js" type="text/javascript"></script>
+  <script src="%(url_prefix_static)s/applets/moinFCKplugins/moinurllib.js" type="text/javascript"></script>
  </head>
  <body scroll="no" style="OVERFLOW: hidden">
     <table cellspacing="0" cellpadding="0" width="100%%" border="0">
--- a/MoinMoin/caching.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/caching.py	Sun Aug 20 02:05:35 2006 +0200
@@ -99,10 +99,6 @@
                 shutil.copyfile(filename, tmpfname)
                 # this is either atomic or happening with real locks set:
                 filesys.rename(tmpfname, fname)
-                try:
-                    os.chmod(self._filename(), 0666 & config.umask)
-                except OSError:
-                    pass
             finally:
                 if self.locking:
                     self.wlock.release()
@@ -124,10 +120,6 @@
                 f.close()
                 # this is either atomic or happening with real locks set:
                 filesys.rename(tmpfname, fname)
-                try:
-                    os.chmod(fname, 0666 & config.umask)
-                except OSError:
-                    pass
             finally:
                 if self.locking:
                     self.wlock.release()
--- a/MoinMoin/config/multiconfig.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/config/multiconfig.py	Sun Aug 20 02:05:35 2006 +0200
@@ -404,7 +404,25 @@
 
     # Wiki identity
     sitename = u'Untitled Wiki'
-    url_prefix = '/wiki'
+
+    # url_prefix is DEPRECATED and not used any more by the code.
+    # it confused many people by its name and default value of '/wiki' to the
+    # wrong conclusion that it is the url of the wiki (the dynamic) stuff,
+    # but it was used to address the static stuff (images, css, js).
+    # Thus we use the more clear url_prefix_static ['/staticXXX'] setting now.
+    # For a limited time, we still look at url_prefix - if it is not None, we
+    # copy the value to url_prefix_static to ease transition.
+    url_prefix = None
+
+    # include the moin version number, so we can have a unlimited cache lifetime
+    # for the static stuff. if stuff changes on version upgrade, url will change
+    # immediately and we have no problem with stale caches.
+    url_prefix_static = '/moin_static160'
+
+    # we need to prefix actions to be able to exclude them by robots.txt:
+    # TODO:
+    # url_prefix_action = '/action'
+
     logo_string = None
     interwikiname = None
 
@@ -562,6 +580,9 @@
         self._acl_rights_default = AccessControlList(self, [self.acl_rights_default])
         self._acl_rights_after = AccessControlList(self, [self.acl_rights_after])
 
+        if self.url_prefix is not None: # remove this code when url_prefix setting is removed
+            self.url_prefix_static = self.url_prefix
+
     def load_meta_dict(self):
         """ The meta_dict contains meta data about the wiki instance. """
         if getattr(self, "_meta_dict", None) is None:
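The comments added above explain the renamed setting: url_prefix_static addresses only the static material (images, css, js), it embeds the moin version so the URL changes on upgrade and caches can be given a long lifetime, and an old url_prefix value is still copied over for a transition period. In a wikiconfig.py this would look roughly like the following (site name and values illustrative):

    from MoinMoin.config.multiconfig import DefaultConfig

    class Config(DefaultConfig):
        sitename = u'My Wiki'
        # static stuff (images, css, js) is served from here; the version number
        # in the default makes stale browser caches harmless after an upgrade
        url_prefix_static = '/moin_static160'
        # url_prefix = '/wiki'  # DEPRECATED: if still set, it is copied to url_prefix_static
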
--- a/MoinMoin/converter/text_html_text_moin_wiki.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/converter/text_html_text_moin_wiki.py	Sun Aug 20 02:05:35 2006 +0200
@@ -1192,7 +1192,7 @@
                               wikiutil.url_unquote(title),
                               self.white_space])
         # Smiley
-        elif src and (self.request.cfg.url_prefix in src or '../' in src) and "img/" in src: # XXX this is dirty!
+        elif src and (self.request.cfg.url_prefix_static in src or '../' in src) and "img/" in src: # XXX this is dirty!
             filename = src.split("/")[-1]
             for markup, data in self.request.theme.icons.iteritems():
                 if data[1] == filename:
--- a/MoinMoin/logfile/__init__.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/logfile/__init__.py	Sun Aug 20 02:05:35 2006 +0200
@@ -139,11 +139,6 @@
             return self._input
         elif name == "_output":
             self._output = codecs.open(self.__filename, 'a', config.charset)
-            try:
-                os.chmod(self.__filename, 0666 & config.umask)
-            except OSError:
-                # TODO: should not ignore errors like this!
-                pass
             return self._output
         else:
             raise AttributeError(name)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/macro/GetText2.py	Sun Aug 20 02:05:35 2006 +0200
@@ -0,0 +1,32 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - Load I18N Text and substitute data.
+
+    This macro has the main purpose of being used by extensions that write
+    data to wiki pages but want to ensure that it is properly translated.
+
+    @copyright: 2006 by MoinMoin:AlexanderSchremmer
+    @license: GNU GPL, see COPYING for details.
+"""
+
+from MoinMoin import wikiutil
+from MoinMoin.packages import unpackLine
+
+Dependencies = ["language"]
+
+def execute(macro, args):
+    """ args consists of a character specifiying the separator and then a
+    packLine sequence describing a list. The first element of it is the message
+    and the remaining elements are substituted in the message using string
+    substitution.
+    """
+    sep = args[0]
+    args = unpackLine(args[1:], sep)
+    if args:
+        translation = macro.request.getText(args[0], formatted=False)
+    else:
+        translation = u""
+    message = translation % tuple(args[1:])
+
+    return macro.formatter.text(message)
+
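Used from wiki markup, the macro's first character selects the separator and the remaining fields are unpacked with unpackLine, so a call like [[GetText2(|Deleted page %s locally.|FooPage)]] would look up the translation of "Deleted page %s locally." and substitute "FooPage" into it; this is the form that SyncPages' generate_log_table emits.
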
--- a/MoinMoin/macro/MonthCalendar.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/macro/MonthCalendar.py	Sun Aug 20 02:05:35 2006 +0200
@@ -449,7 +449,7 @@
 // -->
 </script>
 %s
-""" % (request.cfg.url_prefix, "\n".join(maketip_js), restable)
+""" % (request.cfg.url_prefix_static, "\n".join(maketip_js), restable)
     return formatter.rawHTML(result)
 
 # EOF
--- a/MoinMoin/mail/mailimport.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/mail/mailimport.py	Sun Aug 20 02:05:35 2006 +0200
@@ -269,6 +269,10 @@
             elif table_ends is not None and not line.startswith("||"):
                 break
 
+        # in order to let the gettext system recognise the [[GetText]] calls used below,
+        # we must repeat them here:
+        [_("From"), _("To"), _("Content"), _("Date"), _("Attachments")]
+
         table_header = (u"\n\n## mail_overview (don't delete this line)\n" +
                         u"|| '''[[GetText(From)]] ''' || '''[[GetText(To)]] ''' || '''[[GetText(Content)]] ''' || '''[[GetText(Date)]] ''' || '''[[GetText(Attachments)]] ''' ||\n"
                        )
--- a/MoinMoin/packages.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/packages.py	Sun Aug 20 02:05:35 2006 +0200
@@ -255,16 +255,13 @@
         revstr = '%08d' % 1
         if not os.path.exists(revdir):
             os.mkdir(revdir)
-            os.chmod(revdir, 0777 & config.umask)
 
         currentf = open(cfn, 'w')
         currentf.write(revstr + "\n")
         currentf.close()
-        os.chmod(cfn, 0666 & config.umask)
 
         pagefile = os.path.join(revdir, revstr)
         self._extractToFile(filename, pagefile)
-        os.chmod(pagefile, 0666 & config.umask)
 
         # Clear caches
         try:
--- a/MoinMoin/request/__init__.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/request/__init__.py	Sun Aug 20 02:05:35 2006 +0200
@@ -18,6 +18,13 @@
 from MoinMoin import config, wikiutil, user, caching, error
 from MoinMoin.util import IsWin9x
 
+# umask setting --------------------------------------------------------
+# We do this once per Python process, when request is imported:
+try:
+    # we need to use a bitwise inverted value of config.umask
+    os.umask(0777 ^ config.umask)
+except: # we are on win32
+    pass
 
 # Exceptions -----------------------------------------------------------
 
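This new umask block is why the chmod calls disappear throughout the changeset: setting the process umask once, when MoinMoin.request is imported, gives every newly created file and directory the intended mode. config.umask historically named the bits allowed to survive (it was ANDed into 0666/0777 in the old chmod calls), whereas os.umask() expects the bits to strip, hence the 0777 ^ config.umask inversion. A small illustration of the effect (umask value and filename illustrative):

    import os

    config_umask = 0o770              # MoinMoin-style: permission bits that may survive
    os.umask(0o777 ^ config_umask)    # process umask: the bits to strip from new files

    f = open('umask_demo.txt', 'w')   # plain file created with 0666 & ~umask == 0660
    f.close()
    print(oct(os.stat('umask_demo.txt').st_mode & 0o777))
    os.remove('umask_demo.txt')
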
--- a/MoinMoin/script/export/dump.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/script/export/dump.py	Sun Aug 20 02:05:35 2006 +0200
@@ -17,7 +17,7 @@
 from MoinMoin import script
 from MoinMoin.action import AttachFile
 
-url_prefix = "."
+url_prefix_static = "."
 logo_html = '<img src="logo.png">'
 HTML_SUFFIX = ".html"
 
@@ -108,8 +108,8 @@
         self.init_request()
         request = self.request
 
-        # fix url_prefix so we get relative paths in output html
-        request.cfg.url_prefix = url_prefix
+        # fix url_prefix_static so we get relative paths in output html
+        request.cfg.url_prefix_static = url_prefix_static
 
         if self.options.page:
             pages = [self.options.page]
--- a/MoinMoin/server/standalone.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/server/standalone.py	Sun Aug 20 02:05:35 2006 +0200
@@ -267,7 +267,7 @@
 class MoinRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
 
     bufferSize = 8 * 1024 # used to serve static files
-    staticExpire = 7 * 24 * 3600 # 1 week expiry for static files
+    staticExpire = 365 * 24 * 3600 # 1 year expiry for static files
 
     def __init__(self, request, client_address, server):
         self.server_version = "MoinMoin %s %s" % (version.revision,
@@ -287,14 +287,15 @@
         """ Handle requests (request type GET/HEAD/POST is in self.command)
 
         Separate between wiki pages and css and image url by similar
-        system as cgi and twisted, the '/wiki/' url prefix.
+        system as cgi and twisted, the '/staticXXX/' url prefix.
 
-        TODO: should use url_prefix - and not a constant but
+        TODO: should use url_prefix_static - and not a constant but
         request is not available at this time.  Should be fixed by
-        having url_prefix in a server config.
+        having url_prefix_static in a server config.
         """
-        if self.path.startswith('/wiki/'):
-            self.path = self.path[5:]
+        PREFIX = '/moin_static160/'
+        if self.path.startswith(PREFIX): # XXX
+            self.path = self.path[len(PREFIX)-1:]
             self.serve_static_file()
         elif self.path in ['/favicon.ico', '/robots.txt']:
             self.serve_static_file()
--- a/MoinMoin/theme/__init__.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/theme/__init__.py	Sun Aug 20 02:05:35 2006 +0200
@@ -156,7 +156,7 @@
         @rtype: string
         @return: the image href
         """
-        return "%s/%s/img/%s" % (self.cfg.url_prefix, self.name, img)
+        return "%s/%s/img/%s" % (self.cfg.url_prefix_static, self.name, img)
 
     def emit_custom_html(self, html):
         """
@@ -592,7 +592,7 @@
 
         # Create stylesheets links
         html = []
-        prefix = self.cfg.url_prefix
+        prefix = self.cfg.url_prefix_static
         csshref = '%s/%s/css' % (prefix, self.name)
         for media, basename in stylesheets:
             href = '%s/%s.css' % (csshref, basename)
@@ -883,7 +883,7 @@
 
     def externalScript(self, name):
         """ Format external script html """
-        src = '%s/common/js/%s.js' % (self.request.cfg.url_prefix, name)
+        src = '%s/common/js/%s.js' % (self.request.cfg.url_prefix_static, name)
         return '<script type="text/javascript" src="%s"></script>' % src
 
     def credits(self, d, **keywords):
--- a/MoinMoin/user.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/user.py	Sun Aug 20 02:05:35 2006 +0200
@@ -525,11 +525,6 @@
                 data.write(line)
         data.close()
 
-        try:
-            os.chmod(self.__filename(), 0666 & config.umask)
-        except OSError:
-            pass
-
         if not self.disabled:
             self.valid = 1
 
@@ -580,10 +575,6 @@
             bmfile = open(bm_fn, "w")
             bmfile.write(str(tm)+"\n")
             bmfile.close()
-            try:
-                os.chmod(bm_fn, 0666 & config.umask)
-            except OSError:
-                pass
 
     def getBookmark(self):
         """ Get bookmark timestamp.
@@ -869,12 +860,6 @@
                 file.write(data)
             finally:
                 file.close()
-
-            try:
-                os.chmod(path, 0666 & config.umask)
-            except OSError, err:
-                self._request.log("Can't change mode of trail file: %s" %
-                                  str(err))
         except (IOError, OSError), err:
             self._request.log("Can't save trail file: %s" % str(err))
 
--- a/MoinMoin/util/filesys.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/util/filesys.py	Sun Aug 20 02:05:35 2006 +0200
@@ -15,8 +15,12 @@
 #############################################################################
 
 def chmod(name, mode, catchexception=True):
+    """ change mode of some file/dir on platforms that support it.
+        usually you don't need this because we use os.umask() when importing
+        request.py
+    """
     try:
-        return os.chmod(name, mode)
+        os.chmod(name, mode)
     except OSError:
         if not catchexception:
             raise
@@ -31,6 +35,10 @@
     This is modified version of the os.makedirs from Python 2.4. We add
     explicit chmod call after the mkdir call. Fixes some practical
     permission problems on Linux.
+
+    TODO: we use os.umask() now so we usually don't need explicit chmod any
+          more. Check all callers os makedirs/makeDirs and also py2.3+
+          stdlib implementation and maybe remove this function here.
     """
     head, tail = os.path.split(name)
     if not tail:
@@ -86,7 +94,7 @@
             st = os.stat(src)
             mode = S_IMODE(st[ST_MODE])
             if hasattr(os, 'chmod'):
-                os.chmod(dst, mode)
+                os.chmod(dst, mode) # KEEP THIS ONE!
         #else: pass # we are on Win9x,ME - no chmod here
     else:
         shutil.copystat(src, dst)
--- a/MoinMoin/wikisync.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/wikisync.py	Sun Aug 20 02:05:35 2006 +0200
@@ -18,6 +18,7 @@
 from MoinMoin import wikiutil
 from MoinMoin.util import lock
 from MoinMoin.Page import Page
+from MoinMoin.PageEditor import PageEditor
 from MoinMoin.packages import unpackLine, packLine
 
 
@@ -64,9 +65,10 @@
         self.remote_deleted = remote_deleted
         self.local_mime_type = MIMETYPE_MOIN   # XXX no usable storage API yet
         self.remote_mime_type = MIMETYPE_MOIN
+        assert remote_rev != 99999999
 
     def __repr__(self):
-        return repr("<Remote Page %r>" % unicode(self))
+        return repr("<Sync Page %r>" % unicode(self))
 
     def __unicode__(self):
         return u"%s[%s|%s]<%r:%r>" % (self.name, self.local_name, self.remote_name, self.local_rev, self.remote_rev)
@@ -94,7 +96,7 @@
         elif self.remote_name is None:
             n_name = normalise_pagename(self.local_name, local.prefix)
             assert n_name is not None
-            self.remote_name = (local.prefix or "") + n_name
+            self.remote_name = (remote.prefix or "") + n_name
 
         return self # makes using list comps easier
 
@@ -132,26 +134,6 @@
         """ Is true if the page is in both wikis. """
         return self.local_rev and self.remote_rev
 
-    def iter_local_only(cls, sp_list):
-        """ Iterates over all pages that are local only. """
-        for x in sp_list:
-            if x.is_only_local():
-                yield x
-    iter_local_only = classmethod(iter_local_only)
-
-    def iter_remote_only(cls, sp_list):
-        """ Iterates over all pages that are remote only. """
-        for x in sp_list:
-            if x.is_only_remote():
-                yield x
-    iter_remote_only = classmethod(iter_remote_only)
-
-    def iter_local_and_remote(cls, sp_list):
-        """ Iterates over all pages that are local and remote. """
-        for x in sp_list:
-            if x.is_local_and_remote():
-                yield x
-    iter_local_and_remote = classmethod(iter_local_and_remote)
 
 class RemoteWiki(object):
     """ This class should be the base for all implementations of remote wiki
@@ -173,13 +155,18 @@
         """ Returns a list of SyncPage instances. """
         return NotImplemented
 
+    def delete_page(self, pagename):
+        """ Deletes the page called pagename. """
+        return NotImplemented
+
 
 class MoinRemoteWiki(RemoteWiki):
     """ Used for MoinMoin wikis reachable via XMLRPC. """
-    def __init__(self, request, interwikiname, prefix, pagelist):
+    def __init__(self, request, interwikiname, prefix, pagelist, verbose=False):
         self.request = request
         self.prefix = prefix
         self.pagelist = pagelist
+        self.verbose = verbose
         _ = self.request.getText
 
         wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, ))
@@ -211,7 +198,7 @@
             self.iwid_full = packLine([remote_iwid, interwikiname])
 
     def createConnection(self):
-        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
+        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=self.verbose)
 
     # Public methods
     def get_diff(self, pagename, from_rev, to_rev, n_name=None):
@@ -231,6 +218,9 @@
         result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
         return result
 
+    def delete_page(self, pagename):
+        return # XXX not implemented yet
+
     # Methods implementing the RemoteWiki interface
     def get_interwiki_name(self):
         return self.remote_interwikiname
@@ -252,7 +242,10 @@
             normalised_name = normalise_pagename(name, self.prefix)
             if normalised_name is None:
                 continue
-            rpages.append(SyncPage(normalised_name, remote_rev=abs(revno), remote_name=name, remote_deleted=revno < 0))
+            if abs(revno) != 99999999: # I love sane in-band signalling
+                remote_rev = abs(revno)
+                remote_deleted = revno < 0
+                rpages.append(SyncPage(normalised_name, remote_rev=remote_rev, remote_name=name, remote_deleted=remote_deleted))
         return rpages
 
     def __repr__(self):
@@ -275,16 +268,27 @@
 
     def createSyncPage(self, page_name):
         normalised_name = normalise_pagename(page_name, self.prefix)
+        if normalised_name is None:
+            return None
         if not self.request.user.may.write(normalised_name):
             return None
-        if normalised_name is None:
+        page = Page(self.request, page_name)
+        revno = page.get_real_rev()
+        if revno == 99999999: # I love sane in-band signalling
             return None
-        page = Page(self.request, page_name)
-        return SyncPage(normalised_name, local_rev=page.get_real_rev(), local_name=page_name, local_deleted=not page.exists())
+        return SyncPage(normalised_name, local_rev=revno, local_name=page_name, local_deleted=not page.exists())
 
     # Public methods:
 
     # Methods implementing the RemoteWiki interface
+    def delete_page(self, page_name, comment):
+        page = PageEditor(self.request, page_name)
+        try:
+            page.deletePage(comment)
+        except PageEditor.AccessDenied, (msg, ):
+            return msg
+        return ""
+
     def get_interwiki_name(self):
         return self.request.cfg.interwikiname
 
@@ -338,7 +342,7 @@
         self.normalised_name = normalised_name
 
     def __repr__(self):
-        return u"<Tag normalised_pagename=%r remote_wiki=%r remote_rev=%r current_rev=%r>" % (self.normalised_name, self.remote_wiki, self.remote_rev, self.current_rev)
+        return u"<Tag normalised_pagename=%r remote_wiki=%r remote_rev=%r current_rev=%r>" % (getattr(self, "normalised_name", "UNDEF"), self.remote_wiki, self.remote_rev, self.current_rev)
 
     def __cmp__(self, other):
         if not isinstance(other, Tag):
--- a/MoinMoin/wikiutil.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/wikiutil.py	Sun Aug 20 02:05:35 2006 +0200
@@ -461,7 +461,6 @@
         metafile = codecs.open(self.metafilename, "w", "utf-8")
         metafile.write(meta)
         metafile.close()
-        filesys.chmod(self.metafilename, 0666 & config.umask)
         self.dirty = False
 
     def sync(self, mtime_usecs=None):
--- a/MoinMoin/xmlrpc/__init__.py	Sun Aug 20 00:14:25 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Sun Aug 20 02:05:35 2006 +0200
@@ -706,11 +706,12 @@
         """
         from MoinMoin.util.bdiff import decompress, patch
         from MoinMoin.wikisync import TagStore, BOTH
+        from MoinMoin.packages import unpackLine
         LASTREV_INVALID = xmlrpclib.Fault("LASTREV_INVALID", "The page was changed")
 
         pagename = self._instr(pagename)
 
-        comment = u"Remote Merge - %r" % interwiki_name
+        comment = u"Remote Merge - %r" % unpackLine(interwiki_name)[-1]
         
         # User may read page?
         if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename):
@@ -721,14 +722,14 @@
         # current version of the page
         currentpage = PageEditor(self.request, pagename, do_editor_backup=0)
 
-        if currentpage.get_real_rev() != last_remote_rev:
+        if last_remote_rev is not None and currentpage.get_real_rev() != last_remote_rev:
             return LASTREV_INVALID
 
         if not currentpage.exists() and diff is None:
             return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.")
 
         # base revision used for the diff
-        basepage = Page(self.request, pagename, rev=delta_remote_rev)
+        basepage = Page(self.request, pagename, rev=(delta_remote_rev or 0))
 
         # generate the new page revision by applying the diff
         newcontents = patch(basepage.get_raw_body_str(), decompress(str(diff)))
@@ -736,7 +737,7 @@
 
         # write page
         try:
-            currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev, comment=comment)
+            currentpage.saveText(newcontents.decode("utf-8"), last_remote_rev or 0, comment=comment)
         except PageEditor.Unchanged: # could happen in case of both wiki's pages being equal
             pass
         except PageEditor.EditConflict:
@@ -822,7 +823,6 @@
         if os.path.exists(filename) and not os.path.isfile(filename):
             return self.noSuchPageFault()
         open(filename, 'wb+').write(data.data)
-        os.chmod(filename, 0666 & config.umask)
         AttachFile._addLogEntry(self.request, 'ATTNEW', pagename, filename)
         return xmlrpclib.Boolean(1)
 
--- a/docs/CHANGES	Sun Aug 20 00:14:25 2006 +0200
+++ b/docs/CHANGES	Sun Aug 20 02:05:35 2006 +0200
@@ -137,6 +137,7 @@
     * setResponseCode request method DEPRECATED (it only worked for Twisted
       anyway), just use emit_http_headers and include a Status: XXX header.
       Method will vanish with moin 1.7. 
+    * cfg.url_prefix is DEPRECATED, please use cfg.url_prefix_static.
 
   New Features:
     * Removed "underscore in URL" == "blank in pagename magic" - it made more
@@ -220,6 +221,15 @@
       need to use "from MoinMoin.config.multiconfig import DefaultConfig" now.
       You need to change this in your wikiconfig.py or farmconfig.py file.
       See MoinMoin/multiconfig.py for an alternative way if you can't do that.
+    * HINT: you need to change your url_prefix setting in 2 ways:
+      1. The setting is now called url_prefix_static (to make it more clear
+         that we mean the static stuff, not the wiki script url).
+      2. The strongly recommended (and default) value of it is '/moin_static160'
+         for moin version 1.6.0 (and will be ...161 for moin 1.6.1). We use a
+         very long cache lifetime for static stuff now, so it is required to
+         change the URL of static stuff when the static stuff changes (e.g. on
+         a version upgrade of moin) to avoid problems with stale cache content.
+         Your moin will be faster with lower load and traffic because of this.
 
 Version 1.5-current:
    * moin.fcg improved - if you use FastCGI, you must use the new file:
@@ -228,6 +238,9 @@
      * the count of created threads is limited now (default: 5), you can use 1
        to use non-threaded operation.
      * configurable socket.listen() backlog (default: 5)
+  * tuning:
+    * more efficient locking code on POSIX platforms, we do much less I/O there now
+    * removed most chmod calls in favour of a single os.umask call
   
 Version 1.5.4:
     HINT: read docs/README.migration.
--- a/docs/CHANGES.aschremmer	Sun Aug 20 00:14:25 2006 +0200
+++ b/docs/CHANGES.aschremmer	Sun Aug 20 02:05:35 2006 +0200
@@ -2,20 +2,21 @@
 ===============================
 
   Known main issues:
-    * How to handle renames/deletes?
+    * Do I need to tag delete operations?
+    * How to handle renames?
     * How will we store tags? (Metadata support would be handy)
       (currently done in Pickle files)
 
   ToDo:
-    * Implement all syncronisation cases (all 3 page sets).
+    * Delete remote pages.
     * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
-    * Attach the status information to the job page.
     * Check what needs to be documented on MoinMaster.
 
     * Show tags in an action=info view?
 
     * Test with prefixes
     * Search for XXX
+    * Delete iters?
     * Maybe refactor YYY into MoinLocalWiki
     * Remove amount of "very" in the code
     * Clean up trailing whitespace.
@@ -26,6 +27,7 @@
     * Do older tags of one wiki site have to be stored as well? Why don't we
       keep just one tag?
     * Put author names into the comment field, transmit mimetypes.
+    * Implement renamed pages.
 
   New Features:
     * XMLRPC method to return the Moin version
@@ -44,6 +46,7 @@
     * SyncPages action
     * XMLRPC functions may return Fault instances
     * diff3 algorithm extended, a new mode should reduce the conflicts
+    * GetText2 macro
 
   Bugfixes (only stuff that is buggy in moin/1.6 main branch):
     * Conflict resolution fixes. (merged into main)
@@ -114,6 +117,8 @@
          normalised name. Implemented preliminary mime type support, only transmission of the mime type
          and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :).
          Added infrastructure support for detecting deleted pages (not used in the merging logic yet).
+         Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages
+         working that are just available on one side. Working synchronisation of deleted pages.
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress