changeset 1399:4f591cf4f1c6

merge main
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Mon, 21 Aug 2006 21:09:46 +0200
parents fe8facfcb439 (current diff) b7ca51e9be1e (diff)
children a95414cd1280
files
diffstat 6 files changed, 160 insertions(+), 78 deletions(-) [+]
line wrap: on
line diff
--- a/MoinMoin/action/SyncPages.py	Mon Aug 21 21:07:36 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Mon Aug 21 21:09:46 2006 +0200
@@ -24,13 +24,13 @@
 from MoinMoin.PageEditor import PageEditor, conflict_markers
 from MoinMoin.Page import Page
 from MoinMoin.wikidicts import Dict, Group
-from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage
+from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage, NotAllowedException
 from MoinMoin.wikisync import MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH, MIMETYPE_MOIN
 from MoinMoin.util.bdiff import decompress, patch, compress, textdiff
 from MoinMoin.util import diff3
 
 
-debug = True
+debug = False
 
 
 # map sync directions
@@ -59,10 +59,17 @@
         table = []
 
         for line in self.status:
-            macro_args = [line[1]] + list(line[2])
-            table.append(table_line % {"smiley": line[0][1], "message":
-                line[1] and (u"[[GetText2(|%s)]]" % (packLine(macro_args), )),
-                "raw_suffix": line[3]})
+            if line[1]:
+                if line[2]:
+                    macro_args = [line[1]] + list(line[2])
+                    message = u"[[GetText2(|%s)]]" % (packLine(macro_args), )
+                else:
+                    message = u"[[GetText(%s)]]" % (line[1], )
+            else:
+                message = u""
+            table.append(table_line % {"smiley": line[0][1],
+                                       "message": message,
+                                       "raw_suffix": line[3]})
 
         return "\n".join(table)
 
@@ -76,6 +83,8 @@
             "pageList": None,
             "groupList": None,
             "direction": "foo", # is defaulted below
+            "user": None,     # this should be refactored into a password agent
+            "password": None, # or OpenID like solution (XXX)
         }
 
         options.update(Dict(self.request, self.pagename).get_dict())
@@ -117,7 +126,6 @@
 
         params = self.fix_params(self.parse_page())
 
-        # XXX aquire readlock on self.page
         try:
             if params["direction"] == UP:
                 raise ActionStatus(_("The only supported directions are BOTH and DOWN."))
@@ -130,21 +138,27 @@
 
             local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"])
             try:
-                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], verbose=debug)
-            except UnsupportedWikiException, (msg, ):
+                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], params["user"], params["password"], verbose=debug)
+            except (UnsupportedWikiException, NotAllowedException), (msg, ):
                 raise ActionStatus(msg)
 
             if not remote.valid:
                 raise ActionStatus(_("The ''remoteWiki'' is unknown."))
-
-            self.sync(params, local, remote)
         except ActionStatus, e:
             msg = u'<p class="error">%s</p>\n' % (e.args[0], )
-        else:
-            msg = u"%s" % (_("Syncronisation finished. Look below for the status messages."), )
 
-        self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0)
-        # XXX release readlock on self.page
+        try:
+            try:
+                self.sync(params, local, remote)
+            except Exception, e:
+                self.log_status(self.ERROR, _("A severe error occured:"), raw_suffix=repr(e))
+                raise
+            else:
+                msg = u"%s" % (_("Syncronisation finished. Look below for the status messages."), )
+        finally:
+            # XXX aquire readlock on self.page
+            self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0)
+            # XXX release readlock on self.page
 
         self.page.send_page(self.request, msg=msg)
 
@@ -307,7 +321,7 @@
                 return
 
             if remote_rev is None and direction == BOTH:
-                self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki."))
+                self.log_status(ActionClass.INFO, _("This is the first synchronisation between the local and the remote wiki for the page %s."), (sp.name, ))
 
             if sp.remote_deleted:
                 remote_contents = ""
@@ -319,9 +333,9 @@
             if sp.local_mime_type == MIMETYPE_MOIN:
                 remote_contents_unicode = remote_contents.decode("utf-8")
                 # here, the actual 3-way merge happens
+                merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 1, *conflict_markers) # YYY direct access
                 if debug:
-                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body()))
-                merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) # YYY direct access
+                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r into %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), merged_text))
                 merged_text_raw = merged_text.encode("utf-8")
             else:
                 if diff is None:
@@ -335,28 +349,44 @@
 
             # XXX upgrade to write lock
             try:
-                current_page.saveText(merged_text, sp.local_rev, comment=comment) # YYY direct access
+                local_change_done = True
+                current_page.saveText(merged_text, sp.local_rev or 0, comment=comment) # YYY direct access
             except PageEditor.Unchanged:
-                pass
+                local_change_done = False
             except PageEditor.EditConflict:
+                local_change_done = False
                 assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"
 
             new_local_rev = current_page.get_real_rev() # YYY direct access
 
-            if direction == BOTH:
-                try:
-                    very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name)
-                except Exception, e:
-                    raise # XXX rollback locally and do not tag locally
-            else:
-                very_current_remote_rev = current_remote_rev
+            def rollback_local_change(): # YYY direct local access
+                rev = new_local_rev - 1
+                revstr = '%08d' % rev
+                oldpg = Page(self.request, sp.local_name, rev=rev)
+                pg = PageEditor(self.request, sp.local_name)
+                savemsg = pg.saveText(oldpg.get_raw_body(), 0, comment=u"Wikisync rollback", extra=revstr, action="SAVE/REVERT")
+
+            try:
+                if direction == BOTH:
+                    try:
+                        very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name)
+                    except NotAllowedException:
+                        self.log_status(ActionClass.ERROR, _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."), (sp.name, ))
+                        return
+                else:
+                    very_current_remote_rev = current_remote_rev
+
+                local_change_done = False # changes are committed remotely, all is fine
+            finally:
+                if local_change_done:
+                    rollback_local_change()
 
             tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name)
 
             if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(merged_text):
-                self.log_status(ActionClass.INFO, _("Page successfully merged."))
+                self.log_status(ActionClass.INFO, _("Page %s successfully merged."), (sp.name, ))
             else:
-                self.log_status(ActionClass.WARN, _("Page merged with conflicts."))
+                self.log_status(ActionClass.WARN, _("Page %s merged with conflicts."), (sp.name, ))
 
             # XXX release lock
 
--- a/MoinMoin/server/standalone.py	Mon Aug 21 21:07:36 2006 +0200
+++ b/MoinMoin/server/standalone.py	Mon Aug 21 21:09:46 2006 +0200
@@ -575,7 +575,7 @@
             MoinRequestHandler.serve_moin, config.memoryProfile)
 
     if config.logPath:
-        sys.stderr = file(config.logPath, 'at')
+        sys.stderr = file(config.logPath, 'at', 0)
     registerSignalHandlers(quit)
     httpd = makeServer(config)
 
--- a/MoinMoin/util/diff3.py	Mon Aug 21 21:07:36 2006 +0200
+++ b/MoinMoin/util/diff3.py	Mon Aug 21 21:09:46 2006 +0200
@@ -98,7 +98,7 @@
         result.extend(new[new_nr:])
     # other added lines
     elif old_nr == old_len and new_nr == new_len:
-        result.extend(other[other_nr])
+        result.extend(other[other_nr:])
     # new deleted lines
     elif (new_nr == new_len and (old_len - old_nr == other_len - other_nr) and
           match(old, other, old_nr, other_nr, old_len-old_nr) == old_len - old_nr):
--- a/MoinMoin/wikisync.py	Mon Aug 21 21:07:36 2006 +0200
+++ b/MoinMoin/wikisync.py	Mon Aug 21 21:09:46 2006 +0200
@@ -20,6 +20,7 @@
 from MoinMoin.Page import Page
 from MoinMoin.PageEditor import PageEditor
 from MoinMoin.packages import unpackLine, packLine
+from MoinMoin.support.multicall import MultiCall
 
 
 MIMETYPE_MOIN = "text/wiki"
@@ -43,6 +44,9 @@
 class UnsupportedWikiException(Exception): pass
 
 
+class NotAllowedException(Exception): pass
+
+
 class SyncPage(object):
     """ This class represents a page in one or two wiki(s). """
     def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None,
@@ -158,7 +162,7 @@
 
 class MoinRemoteWiki(RemoteWiki):
     """ Used for MoinMoin wikis reachable via XMLRPC. """
-    def __init__(self, request, interwikiname, prefix, pagelist, verbose=False):
+    def __init__(self, request, interwikiname, prefix, pagelist, user, password, verbose=False):
         self.request = request
         self.prefix = prefix
         self.pagelist = pagelist
@@ -178,7 +182,16 @@
         try:
             iw_list = self.connection.interwikiName()
         except xmlrpclib.Fault, e:
-            raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least."))
+            raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, version 1.6 is required at least."))
+
+        if user and password:
+            token = self.connection.getAuthToken(user, password)
+            if token:
+                self.token = token
+            else:
+                raise NotAllowedException(_("Invalid username or password."))
+        else:
+            self.token = None
 
         self.remote_interwikiname = remote_interwikiname = iw_list[0]
         self.remote_iwid = remote_iwid = iw_list[1]
@@ -201,7 +214,13 @@
         """ Returns the binary diff of the remote page named pagename, given
             from_rev and to_rev. """
         try:
-            result = self.connection.getDiff(pagename, from_rev, to_rev, n_name)
+            if self.token:
+                m = MultiCall(self.connection)
+                m.applyAuthToken(self.token)
+                m.getDiff(pagename, from_rev, to_rev, n_name)
+                tokres, result = m()
+            else:
+                result = self.connection.getDiff(pagename, from_rev, to_rev, n_name)
         except xmlrpclib.Fault, e:
             if e.faultCode == "INVALID_TAG":
                 return None
@@ -211,12 +230,29 @@
 
     def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name):
         """ Merges the diff into the page on the remote side. """
-        result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
+        try:
+            if self.token:
+                m = MultiCall(self.connection)
+                m.applyAuthToken(self.token)
+                m.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
+                tokres, result = m()
+            else:
+                result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
+        except xmlrpclib.Fault, e:
+            if e.faultCode == "NOT_ALLOWED":
+                raise NotAllowedException
+            raise
         return result
 
     def delete_page(self, pagename, last_remote_rev, interwiki_name):
         try:
-            result = self.connection.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None)
+            if self.token:
+                m = MultiCall(self.connection)
+                m.applyAuthToken(self.token)
+                m.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None)
+                tokres, result = m()
+            else:
+                result = self.connection.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None)
         except xmlrpclib.Fault, e:
             if e.faultCode == "NOT_ALLOWED":
                 return e.faultString
@@ -239,7 +275,13 @@
                    "prefix": self.prefix,
                    "pagelist": self.pagelist,
                    "mark_deleted": True}
-        pages = self.connection.getAllPagesEx(options)
+        if self.token:
+            m = MultiCall(self.connection)
+            m.applyAuthToken(self.token)
+            m.getAllPagesEx(options)
+            tokres, pages = m()
+        else:
+            pages = self.connection.getAllPagesEx(options)
         rpages = []
         for name, revno in pages:
             normalised_name = normalise_pagename(name, self.prefix)
--- a/MoinMoin/xmlrpc/__init__.py	Mon Aug 21 21:07:36 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Mon Aug 21 21:09:46 2006 +0200
@@ -723,7 +723,7 @@
 
         # User may read page?
         if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename):
-            return self.notAllowedFault()
+            return xmlrpclib.Fault("NOT_ALLOWED", "You are not allowed to write to this page.")
 
         # XXX add locking here!
 
--- a/docs/CHANGES.aschremmer	Mon Aug 21 21:07:36 2006 +0200
+++ b/docs/CHANGES.aschremmer	Mon Aug 21 21:09:46 2006 +0200
@@ -7,76 +7,83 @@
     * How should we store tags? (Metadata support would be handy)
       (currently done in Pickle files)
 
-  ToDo:
-    * Implement rollback
+  ToDo: (this should not go into CHANGES)
     * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
-    * Check what needs to be documented on MoinMaster.
-    * Test with prefixes
-    * Search for XXX
-    * Implement a cross-site authentication system, i.e. mainly an
-      identity storage. (does OpenID make sense?)
+    * Implement a password agent. (does OpenID make sense?)
 
+  Longterm ToDo:
     * Maybe refactor YYY into MoinLocalWiki
-    * Add page locking, i.e. use the one in the new storage layer.
+    * Add page locking, i.e. use the one in the new storage layer (see XXX).
     * Do older tags of one wiki site have to be stored as well? Why don't we
       keep just one tag?
-    * Put author names into the comment field, transmit mimetypes.
+    * Put author names into the comment field
+    * Transmit mimetypes (see XXX). Needs new storage system.
     * Implement renamed pages.
+    * Cache the result of remote.get_pages locally to reduce the load.
 
   New Features:
-    * XMLRPC method to return the Moin version
-    * XMLRPC multicall support
     * Conflict icon in RecentChanges
-    * XMLRPC Authentication System
-    * Binary Diffing
-    * XMLRPC method to get binary diffs
-    * XMLRPC method to merge remote changes locally
-    * XMLRPC method to get the interwiki name
-    * TagStore/PickleTagStore class
-    * XMLRPC method to get the pagelist in a special way (revnos,
-      no system pages etc.)
-    * IWID support - i.e. every instance has a unique ID
-    * InterWiki page editable in the wiki, modification detection based on mtimes
-    * SyncPages action
-    * XMLRPC functions may return Fault instances
-    * diff3 algorithm extended, a new mode should reduce the conflicts
-    * GetText2 macro
-    * showtags action
+    * New XMLRPC methods (see doc strings for details):
+      * getMoinVersion
+      * system.multicall -- multicall support
+      * Authentication System: getAuthToken/applyAuthToken
+      * getDiff -- method to get binary diffs
+      * mergeDiff -- method to merge local changes remotely
+      * interwikiName -- method to get the IWID and the interwiki moniker
+      * getAllPagesEx -- method to get the pagelist in a special way (revnos,
+        no system pages etc.)
+    * IWID support - i.e. every wiki instance has a unique ID
+    * The list of InterWiki sites is editable in the wiki (page InterWikiMap),
+      it is getting reloaded every minute
+    * Synchronisation of wikis using the SyncPages action
+    * GetText2 macro that allows translating messages that contain data
+    * showtags action that lists all tags related to a page
 
-  Bugfixes (only stuff that is buggy in moin/1.6 main branch):
-    * Conflict resolution fixes. (merged into main)
-    * Python 2.5 compatibility fixes in the Page caching logic (merged)
-    * sre pickle issues in the wikidicts code (merged)
+  Bugfixes:
+    * Conflict resolution fixes.
+    * Python 2.5 compatibility fixes in the Page caching logic
+    * sre pickle issues in the wikidicts code
     * cgitb can hide particular names, this avoids information leaks
       if the user files cannot be parsed for example
     * Fixed User.__repr__ - it is insane to put the ID in there
-    * Worked around the FastCGI problem on Lighttpd: empty lines in the error log, thanks to Jay Soffian
+    * Worked around the FastCGI problem on Lighttpd: empty lines in the error
+      log, thanks to Jay Soffian
     * Fixed the MetaDict code to use locks.
     * Fixed bug in request.py that avoided showing a traceback if there was a fault
       after the first headers were sent.
     * Fixed severe race conditions in the meta dict and the sync tags code.
     * Mute the tempnam warning in the caching module.
+    * diff3 algorithm extended, a new mode should reduce the conflicts,
+      fixed a bug that silently truncated pages if there was content added
+      on one side at the end
+    * Standalone opens it logfile unbuffered from now on, thanks to
+      Carsten Grohmann
 
   Other Changes:
     * Refactored conflict resolution and XMLRPC code.
-    * Enhanced API at some points.
+    * Added a module for binary diffs
 
   Developer notes:
-    * ...
+    * There is a new Page method called Page.get_raw_body_str that returns
+      the encoded page body. This is useful if you just deal with byte data
+      (e.g. while generating binary diffs).
+    * The TagStore/PickleTagStore system is used to store the synchronisation tags.
+    * XMLRPC functions may return Fault instances
+    * Moin got multicall support, including a module that makes it usable on the
+      client-side without requiring Python 2.4
 
 Do not forget to check the related wiki page: http://moinmoin.wikiwikiweb.de/WikiSyncronisation
 
 Diary
 =====
 
-Week 21: Basic Infrastructur setup (repos),
+Week 21: Basic infrastructure setup (repos),
          initial talks to the mentor, started writing the design document,
          helped other students to get started
-Week 22: Tax forms, Fulfilled transcription request,
+Week 22: Tax forms, fulfilled transcription request,
          written conflict icon support, refactored conflict handling,
-         changed conflict icon,
-         Added xmlrpc multicall support into the server and
-         backported the client code from python 2.4
+         changed conflict icon, added xmlrpc multicall support into the server
+         and backported the client code from Python 2.4
 Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as
          a base for syncronisation. (See wiki)
 Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts
@@ -110,10 +117,13 @@
          the prefix and the pageList on the remote side. Finished the direction==DOWN mode.
 Week 33: Started designing the solutions for the other sync cases. Store and transmit the
          normalised name. Implemented preliminary mime type support, only transmission of the mime type
-         and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :).
-         Added infrastructure support for detecting deleted pages (not used in the merging logic yet).
+         and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and inform
+         the user about the missing support for them).
+         Added infrastructure support for detecting deleted pages.
          Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages
          working that are just available on one side. Working synchronisation of deleted pages.
+         Implemented rollback in case of remote problems and exception logging.
+         Documented this system on MoinMaster. Added authentication support.
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress