changeset 1304:f103cf7c371e

Detect renamed pages. Changed the getDiff interface.
author Alexander Schremmer <alex AT alexanderweb DOT de>
date Thu, 17 Aug 2006 00:00:57 +0200
parents a0b8e78621d0
children 1b4b1e9aaa4c
files MoinMoin/action/SyncPages.py MoinMoin/wikisync.py MoinMoin/xmlrpc/__init__.py docs/CHANGES.aschremmer
diffstat 4 files changed, 96 insertions(+), 25 deletions(-) [+]
line wrap: on
line diff
--- a/MoinMoin/action/SyncPages.py	Wed Aug 16 22:09:22 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Thu Aug 17 00:00:57 2006 +0200
@@ -133,6 +133,7 @@
     def sync(self, params, local, remote):
         """ This method does the syncronisation work.
             Currently, it handles the case where the pages exist on both sides.
+            One of the major missing parts is rename handling.
             Now there are a few other cases left that have to be implemented:
                 Wiki A    | Wiki B   | Remark
                 ----------+----------+------------------------------
@@ -159,8 +160,8 @@
                 exists    | any case | Try a rename search first, then
                           |          | do a sync without considering tags
                 with tags | with non | to ensure data integrity.
-                          | matching |
-                          | tags     |
+                          | matching | Hmm, how do we detect this
+                          | tags     | case if the non-matching tags are only on the remote side?
                 ----------+----------+-------------------------------
                 exists    | exists   | already handled.
         """
@@ -213,24 +214,31 @@
             matching_tags.sort()
             #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags)
 
-            if not matching_tags:
-                remote_rev = None
-                local_rev = rp.local_rev # merge against the newest version
-                old_contents = ""
-            else:
+            # default values, used when there is no usable matching tag
+            normalised_name = None
+            remote_rev = None
+            local_rev = rp.local_rev # merge against the newest version
+            old_contents = ""
+
+            if matching_tags:
                 newest_tag = matching_tags[-1]
-                # XXX check the tag.normalised_name here
-                local_rev = newest_tag.current_rev
-                remote_rev = newest_tag.remote_rev
-                if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev):
+                
+                # handle some cases where we cannot continue for this page
+                if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev):
                     continue # no changes done, next page
-                if rp.local_mime_type != MIMETYPE_MOIN and not (remote_rev == rp.remote_rev ^ local_rev == current_rev):
+                if rp.local_mime_type != MIMETYPE_MOIN and not ((newest_tag.remote_rev == rp.remote_rev) ^ (newest_tag.current_rev == current_rev)):
                     self.log_status(ActionClass.WARN, _("The item %(pagename)s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again.") % {"pagename": rp.name})
                     continue
                 if rp.local_mime_type != rp.remote_mime_type:
                     self.log_status(ActionClass.WARN, _("The item %(pagename)s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again.") % {"pagename": rp.name})
                     continue
-                old_contents = Page(self.request, local_pagename, rev=local_rev).get_raw_body_str() # YYY direct access
+                if newest_tag.normalised_name != rp.name:
+                    self.log_status(ActionClass.WARN, _("The item %(pagename)s was renamed locally. This is not implemented yet. Therefore all synchronisation history is lost for this page.") % {"pagename": rp.name}) # XXX implement renames
+                else:
+                    normalised_name = newest_tag.normalised_name
+                    local_rev = newest_tag.current_rev
+                    remote_rev = newest_tag.remote_rev
+                    old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access
 
             self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name})
 
@@ -241,7 +249,10 @@
                 patch_base_contents = old_contents
 
             if remote_rev != rp.remote_rev:
-                diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT
+                diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name)
+                if diff_result is None:
+                    self.log_status(ActionClass.ERROR, _("The page %(pagename)s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced.") % {"pagename": rp.remote_name})
+                    continue
                 is_remote_conflict = diff_result["conflict"]
                 assert diff_result["diffversion"] == 1
                 diff = diff_result["diff"]
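
The sync() changes above reduce to one decision per page: start from the defaults (no remote base revision, merge against the newest local revision, empty base contents) and only override them when the newest matching tag still carries the page's normalised name; a differing name is treated as a local rename and merely logged for now. A rough sketch of that selection follows; Tag and pick_merge_base are illustrative stand-ins, not MoinMoin API, and the sort order of tags is assumed here (the real ordering lives in wikisync.py).

    # Stand-in for MoinMoin's tag objects; only the attributes used below exist.
    class Tag:
        def __init__(self, remote_rev, current_rev, normalised_name):
            self.remote_rev = remote_rev
            self.current_rev = current_rev
            self.normalised_name = normalised_name
        def __cmp__(self, other):
            # assumption: newer tags compare greater, so the newest sorts last
            return cmp(self.remote_rev, other.remote_rev)

    def pick_merge_base(matching_tags, page_name, newest_local_rev):
        """ Return (local_rev, remote_rev, renamed) describing the merge base. """
        local_rev, remote_rev, renamed = newest_local_rev, None, False
        if matching_tags:
            newest = sorted(matching_tags)[-1]
            if newest.normalised_name != page_name:
                renamed = True              # renamed locally: keep the defaults
            else:
                local_rev = newest.current_rev
                remote_rev = newest.remote_rev
        return local_rev, remote_rev, renamed

    tags = [Tag(3, 7, u"SomePage"), Tag(5, 9, u"SomePage")]
    assert pick_merge_base(tags, u"SomePage", 12) == (9, 5, False)
    assert pick_merge_base(tags, u"RenamedPage", 12) == (12, None, True)
    assert pick_merge_base([], u"SomePage", 12) == (12, None, False)
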
--- a/MoinMoin/wikisync.py	Wed Aug 16 22:09:22 2006 +0200
+++ b/MoinMoin/wikisync.py	Thu Aug 17 00:00:57 2006 +0200
@@ -210,10 +210,15 @@
         return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
 
     # Public methods
-    def get_diff(self, pagename, from_rev, to_rev):
+    def get_diff(self, pagename, from_rev, to_rev, n_name=None):
         """ Returns the binary diff of the remote page named pagename, given
             from_rev and to_rev. """
-        result = self.connection.getDiff(pagename, from_rev, to_rev)
+        try:
+            result = self.connection.getDiff(pagename, from_rev, to_rev, n_name)
+        except xmlrpclib.Fault, e:
+            if e.faultCode == "INVALID_TAG":
+                return None
+            raise
         result["diff"] = str(result["diff"]) # unmarshal Binary object
         return result
 
@@ -356,7 +361,11 @@
     def get_all_tags(self):
         """ Returns a list of all Tag objects associated to this page. """
         return NotImplemented
-    
+
+    def get_last_tag(self):
+        """ Returns the newest tag. """
+        return NotImplemented
+
     def clear(self):
         """ Removes all tags. """
         return NotImplemented
@@ -416,7 +425,14 @@
             self.wlock.release()
 
     def get_all_tags(self):
-        return self.tags
+        return self.tags[:]
+
+    def get_last_tag(self):
+        temp = self.tags[:]
+        temp.sort()
+        if not temp:
+            return None
+        return temp[-1]
 
     def clear(self):
         self.tags = []
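
The get_last_tag() contract added above is small but easy to get wrong: get_all_tags() hands out a copy so callers cannot mutate the store behind its back, and get_last_tag() sorts a copy and returns the newest tag, or None when the store is empty. A self-contained sketch of a store honouring that contract; SimpleTagStore is illustrative only and uses plain integers where MoinMoin stores real tag objects.

    class SimpleTagStore:
        """ Minimal in-memory store with the get_all_tags/get_last_tag contract. """
        def __init__(self):
            self.tags = []

        def add(self, tag):
            self.tags.append(tag)

        def get_all_tags(self):
            return self.tags[:]             # copy: callers cannot mutate the store

        def get_last_tag(self):
            temp = self.tags[:]             # sort a copy, keep insertion order intact
            temp.sort()
            if not temp:
                return None
            return temp[-1]

        def clear(self):
            self.tags = []

    store = SimpleTagStore()
    store.add(3); store.add(1); store.add(2)
    assert store.get_all_tags() == [3, 1, 2]    # insertion order preserved
    assert store.get_last_tag() == 3            # newest (largest) tag wins
    store.clear()
    assert store.get_last_tag() is None
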
--- a/MoinMoin/xmlrpc/__init__.py	Wed Aug 16 22:09:22 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Thu Aug 17 00:00:57 2006 +0200
@@ -577,11 +577,46 @@
 
     # methods for wiki synchronization
 
-    def xmlrpc_getDiff(self, pagename, from_rev, to_rev):
-        """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. """
+    def xmlrpc_getDiff(self, pagename, from_rev, to_rev, n_name=None):
+        """ Gets the binary difference between two page revisions.
+
+            @param pagename: unicode string qualifying the page name
+
+            @param from_rev: integer specifying the source revision. May be None to
+            refer to a virtual empty revision which leads to a diff
+            containing the whole page.
+
+            @param to_rev: integer specifying the target revision. May be None to
+            refer to the current revision. If the current revision is the same
+            as from_rev, there will be a special error condition "ALREADY_CURRENT".
+
+            @param n_name: if given, check that n_name matches the normalised name
+            of the last tag; otherwise the fault "INVALID_TAG" is returned
+
+            If both from_rev and to_rev are None, this function acts similarly to getPage, i.e. it returns a diff of "" against the current revision.
+
+            @return: a dict with the following fields:
+            * status (not a field, implicit, returned as Fault if not SUCCESS):
+             * "SUCCESS" - if the diff could be retrieved successfully
+             * "NOT_EXIST" - item does not exist
+             * "FROMREV_INVALID" - the source revision is invalid
+             * "TOREV_INVALID" - the target revision is invalid
+             * "INTERNAL_ERROR" - there was an internal error
+             * "INVALID_TAG" - the last tag does not match the supplied normalised name
+             * "ALREADY_CURRENT" - this is not merely an error condition. It rather means
+             that there is no new revision to diff against, which is a good thing during
+             synchronisation.
+            * current: the revision number of the current revision (not the one that was diffed against)
+            * diff: Binary object that transports a zlib-compressed binary diff (see bdiff.py, taken from Mercurial)
+            * conflict: whether the page currently contains a conflict marker
+
+        """
         from MoinMoin.util.bdiff import textdiff, compress
+        from MoinMoin.wikisync import TagStore
 
         pagename = self._instr(pagename)
+        if n_name is not None:
+            n_name = self._instr(n_name)
 
         # User may read page?
         if not self.request.user.may.read(pagename):
@@ -627,6 +662,12 @@
         if oldcontents() and oldpage.get_real_rev() == newpage.get_real_rev():
             return xmlrpclib.Fault("ALREADY_CURRENT", "There are no changes.")
 
+        if n_name is not None:
+            tags = TagStore(newpage)
+            last_tag = tags.get_last_tag()
+            if last_tag is not None and last_tag.normalised_name != n_name:
+                return xmlrpclib.Fault("INVALID_TAG", "The supplied tag is invalid because the normalised name does not match.")
+
         newcontents = newcontents()
         conflict = wikiutil.containsConflictMarker(newcontents)
         diffblob = xmlrpclib.Binary(compress(textdiff(oldcontents(), newcontents)))
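
Seen from a client, the getDiff interface documented above is one XML-RPC call: pass the page name, the two revisions (or None), and optionally the normalised name, then either unpack the result dict or handle the Fault. A sketch of such a call; the URL and page name are placeholders, and per the docstring the diff field carries a zlib-compressed binary diff.

    import xmlrpclib
    import zlib

    url = "http://example.com/mywiki?action=xmlrpc2"    # placeholder wiki URL
    wiki = xmlrpclib.ServerProxy(url, allow_none=True)

    try:
        # from_rev=None, to_rev=None: diff the empty page against the current revision
        result = wiki.getDiff(u"FrontPage", None, None, None)
    except xmlrpclib.Fault, fault:
        # e.g. NOT_EXIST, INVALID_TAG or ALREADY_CURRENT (the last one just means
        # there is nothing new to fetch)
        print "getDiff failed:", fault.faultCode, fault.faultString
    else:
        print "current remote revision:", result["current"]
        print "conflict marker present:", result["conflict"]
        raw_diff = zlib.decompress(result["diff"].data)  # still in bdiff format
        print "uncompressed diff is %d bytes" % len(raw_diff)
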
--- a/docs/CHANGES.aschremmer	Wed Aug 16 22:09:22 2006 +0200
+++ b/docs/CHANGES.aschremmer	Thu Aug 17 00:00:57 2006 +0200
@@ -9,17 +9,20 @@
   ToDo:
     * Tags should store the page name to recognise renaming scenarios.
     * Implement all synchronisation cases (all 3 page sets).
-    * Test with prefixes
     * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
     * Attach the status information to the job page.
+    * Check what needs to be documented on MoinMaster.
+
     * Show tags in an action=info view?
-    * Check what needs to be documented on MoinMaster.
+
+    * Test with prefixes
     * Search for XXX
     * Maybe refactor YYY into MoinLocalWiki
     * Reduce the amount of "very" in the code
+    * Clean up trailing whitespace.
+
     * Implement a cross-site authentication system, i.e. mainly an
       identity storage. (does OpenID make sense?)
-    * Clean up trailing whitespace.
     * Add page locking, i.e. use the one in the new storage layer.
     * Do older tags of one wiki site have to be stored as well? Why don't we
       keep just one tag?
@@ -106,9 +109,9 @@
          conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by
          either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by
          the prefix and the pageList on the remote side. Finished the direction==DOWN mode.
-         Started designing the solutions for the other sync cases. Store and transmit the
+Week 33: Started designing the solutions for the other sync cases. Store and transmit the
          normalised name. Implemented preliminary mime type support, only transmission of the mime type
-         and the new storage API is missing.
+         and the new storage API is missing. Changed the getDiff interface. Detect renamed pages (and choke :).
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress