changeset 1050:3b5603bf468d

merge main
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Mon, 24 Jul 2006 22:35:28 +0200
parents d3f7552b9e0a (current diff) b7544e3bd478 (diff)
children 61142a50c41b
files
diffstat 8 files changed, 351 insertions(+), 51 deletions(-) [+]
line wrap: on
line diff
--- a/MoinMoin/Page.py	Mon Jul 24 22:32:30 2006 +0200
+++ b/MoinMoin/Page.py	Mon Jul 24 22:35:28 2006 +0200
@@ -1036,7 +1036,7 @@
                 except wikiutil.PluginMissingError:
                     pass
             else:
-                raise "Plugin missing error!" # XXX what now?
+                raise NotImplementedError("Plugin missing error!") # XXX what now?
         request.formatter = self.formatter
         self.formatter.setPage(self)
         if self.hilite_re:
@@ -1247,7 +1247,7 @@
             except wikiutil.PluginMissingError:
                 pass
         else:
-            raise "No matching parser" # XXX what do we use if nothing at all matches?
+            raise NotImplementedError("No matching parser") # XXX what do we use if nothing at all matches?
             
         # start wiki content div
         request.write(self.formatter.startContent(content_id))
@@ -1339,7 +1339,7 @@
                     except wikiutil.PluginMissingError:
                         pass
                 else:
-                    raise "no matching parser" # XXX what now?
+                    raise NotImplementedError("no matching parser") # XXX what now?
             return getattr(parser, 'caching', False)
         return False
 
@@ -1362,11 +1362,15 @@
             try:
                 code = self.loadCache(request)
                 self.execute(request, parser, code)
-            except 'CacheNeedsUpdate':
+            except Exception, (msg, ):
+                if msg != 'CacheNeedsUpdate':
+                    raise
                 try:
                     code = self.makeCache(request, parser)
                     self.execute(request, parser, code)
-                except 'CacheNeedsUpdate':
+                except Exception, (msg, ):
+                    if msg != 'CacheNeedsUpdate':
+                        raise
                     request.log('page cache failed after creation')
                     self.format(parser)
         
@@ -1392,19 +1396,21 @@
         cache = caching.CacheEntry(request, self, self.getFormatterName(), scope='item')
         attachmentsPath = self.getPagePath('attachments', check_create=0)
         if cache.needsUpdate(self._text_filename(), attachmentsPath):
-            raise 'CacheNeedsUpdate'
+            raise Exception('CacheNeedsUpdate')
         
         import marshal
         try:
             return marshal.loads(cache.content())
+        except "CacheNeedsUpdate": # convert old exception into a new one
+            raise Exception('CacheNeedsUpdate')
         except (EOFError, ValueError, TypeError):
             # Bad marshal data, must update the cache.
             # See http://docs.python.org/lib/module-marshal.html
-            raise 'CacheNeedsUpdate'
+            raise Exception('CacheNeedsUpdate')
         except Exception, err:
             request.log('fail to load "%s" cache: %s' % 
                         (self.page_name, str(err)))
-            raise 'CacheNeedsUpdate'
+            raise Exception('CacheNeedsUpdate')
 
     def makeCache(self, request, parser):
         """ Format content into code, update cache and return code """
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/_tests/test_wikisync.py	Mon Jul 24 22:35:28 2006 +0200
@@ -0,0 +1,36 @@
+# -*- coding: iso-8859-1 -*-
+"""
+MoinMoin - MoinMoin.wikisync tests
+
+@copyright: 2006 MoinMoin:AlexanderSchremmer
+@license: GNU GPL, see COPYING for details.
+"""
+
+from unittest import TestCase
+from MoinMoin.Page import Page
+from MoinMoin.PageEditor import PageEditor
+from MoinMoin._tests import TestConfig, TestSkipped
+
+from MoinMoin.wikisync import TagStore
+
+
+class UnsafeSyncTestcase(TestCase):
+    """ Tests various things related to syncing. Note that it is not possible
+        to create pages without cluttering page revision currently, so we have to use
+        the testwiki. """
+
+    def setUp(self):
+        if not getattr(self.request.cfg, 'is_test_wiki', False):
+            raise TestSkipped('This test needs to be run using the test wiki.')
+        self.page = PageEditor(self.request, "FrontPage")
+
+    def testBasicTagThings(self):
+        tags = TagStore(self.page)
+        self.assert_(not tags.get_all_tags())
+        tags.add(remote_wiki="foo", remote_rev=1, current_rev=2)
+        tags = TagStore(self.page) # reload
+        self.assert_(tags.get_all_tags()[0].remote_rev == 1)
+    
+    def tearDown(self):
+        tags = TagStore(self.page)
+        tags.clear()
--- a/MoinMoin/action/SyncPages.py	Mon Jul 24 22:32:30 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Mon Jul 24 22:35:28 2006 +0200
@@ -9,45 +9,103 @@
 """
 
 import os
+import re
 import zipfile
 import xmlrpclib
 from datetime import datetime
 
+# Compatibility to Python 2.3
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+
 from MoinMoin import wikiutil, config, user
+from MoinMoin.packages import unpackLine
 from MoinMoin.PageEditor import PageEditor
 from MoinMoin.Page import Page
 from MoinMoin.wikidicts import Dict
 
+
 class ActionStatus(Exception): pass
 
+
+class RemotePage(object):
+    """ This class represents a page in (another) wiki. """
+    def __init__(self, name, revno):
+        self.name = name
+        self.revno = revno
+
+    def __repr__(self):
+        return repr(u"%s<%i>" % (self.name, self.revno))
+
+    def __lt__(self, other):
+        return self.name < other.name
+
+
 class RemoteWiki(object):
     """ This class should be the base for all implementations of remote wiki
         classes. """
-    def getInterwikiName(self):
-        """ Returns the interwiki name of the other wiki. """
-        return NotImplemented
-    
+
     def __repr__(self):
         """ Returns a representation of the instance for debugging purposes. """
         return NotImplemented
 
-class MoinWiki(RemoteWiki):
-    def __init__(self, interwikiname):
+    def getInterwikiName(self):
+        """ Returns the interwiki name of the other wiki. """
+        return NotImplemented
+
+    def getPages(self):
+        """ Returns a list of RemotePage instances. """
+        return NotImplemented
+
+
+class MoinRemoteWiki(RemoteWiki):
+    """ Used for MoinMoin wikis reachable via XMLRPC. """
+    def __init__(self, request, interwikiname):
+        self.request = request
         wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, ))
         self.wiki_url = wikiutil.mapURL(self.request, wikiurl)
         self.valid = not wikitag_bad
         self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2"
         self.connection = self.createConnection()
+        # XXX add version and interwiki name checking!
 
     def createConnection(self):
-        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True)
+        if self.valid:
+            return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True)
+        else:
+            return None
 
     # Methods implementing the RemoteWiki interface
     def getInterwikiName(self):
         return self.connection.interwikiName()
 
+    def getPages(self):
+        pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True})
+        return [RemotePage(unicode(name), revno) for name, revno in pages]
+
     def __repr__(self):
-        return "<RemoteWiki wiki_url=%r valid=%r>" % (self.valid, self.wiki_url)
+        return "<MoinRemoteWiki wiki_url=%r valid=%r>" % (self.wiki_url, self.valid)
+
+
+class MoinLocalWiki(RemoteWiki):
+    """ Used for the current MoinMoin wiki. """
+    def __init__(self, request):
+        self.request = request
+
+    # Methods implementing the RemoteWiki interface
+    def getInterwikiName(self):
+        return self.request.cfg.interwikiname
+
+    def getPages(self):
+        l_pages = [[x, Page(self.request, x).get_real_rev()] for x in self.request.rootpage.getPageList(exists=0)]
+        return [RemotePage(unicode(name), revno) for name, revno in l_pages]
+
+    def __repr__(self):
+        return "<MoinLocalWiki>"
+
 
 class ActionClass:
     def __init__(self, pagename, request):
@@ -55,44 +113,81 @@
         self.pagename = pagename
         self.page = Page(request, pagename)
 
-    def parsePage(self):
-        defaults = {
+    def parse_page(self):
+        options = {
             "remotePrefix": "",
             "localPrefix": "",
-            "remoteWiki": ""
+            "remoteWiki": "",
+            "localMatch": None,
+            "remoteMatch": None,
+            "pageList": None,
+            "groupList": None,
         }
+
+        options.update(Dict(self.request, self.pagename).get_dict())
+
+        # Convert page and group list strings to lists
+        if options["pageList"] is not None:
+            options["pageList"] = unpackLine(options["pageList"], ",")
+        if options["groupList"] is not None:
+            options["groupList"] = unpackLine(options["groupList"], ",")
+
+        return options
+
+    def fix_params(self, params):
+        """ Does some fixup on the parameters. """
+
+        # merge the pageList case into the remoteMatch case
+        if params["pageList"] is not None:
+            params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name) for name in params["pageList"]])
+
+        if params["localMatch"] is not None:
+            params["localMatch"] = re.compile(params["localMatch"], re.U)
         
-        defaults.update(Dict(self.request, self.pagename).get_dict())
-        return defaults
-        
+        if params["remoteMatch"] is not None:
+            params["remoteMatch"] = re.compile(params["remoteMatch"], re.U)
+
+        return params
+
     def render(self):
         """ Render action
 
-        This action returns a wiki page with optional message, or
-        redirects to new page.
+        This action returns a status message.
         """
         _ = self.request.getText
-        
-        params = self.parsePage()
-        
+
+        params = self.fix_params(self.parse_page())
+
+
         try:
             if not self.request.cfg.interwikiname:
                 raise ActionStatus(_("Please set an interwikiname in your wikiconfig (see HelpOnConfiguration) to be able to use this action."))
 
             if not params["remoteWiki"]:
                 raise ActionStatus(_("Incorrect parameters. Please supply at least the ''remoteWiki'' parameter."))
-            
-            remote = MoinWiki(params["remoteWiki"])
-            
+
+            remote = MoinRemoteWiki(self.request, params["remoteWiki"])
+            local = MoinLocalWiki(self.request)
+
             if not remote.valid:
                 raise ActionStatus(_("The ''remoteWiki'' is unknown."))
-            
-            # ...
-            self.sync(params)
+
+            self.sync(params, local, remote)
         except ActionStatus, e:
             return self.page.send_page(self.request, msg=u'<p class="error">%s</p>\n' % (e.args[0], ))
 
         return self.page.send_page(self.request, msg=_("Syncronisation finished."))
     
+    def sync(self, params, local, remote):
+        """ This method does the synchronisation work. """
+        
+        r_pages = remote.getPages()
+        l_pages = local.getPages()
+
+        # some initial test code
+        r_new_pages = u",".join(set([repr(x) for x in r_pages]) - set([repr(x) for x in l_pages]))
+        raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages)
+
+
 def execute(pagename, request):
     ActionClass(pagename, request).render()
--- a/MoinMoin/formatter/text_python.py	Mon Jul 24 22:32:30 2006 +0200
+++ b/MoinMoin/formatter/text_python.py	Mon Jul 24 22:35:28 2006 +0200
@@ -54,8 +54,9 @@
         waspcode_timestamp = int(time.time())
         source = ["""
 moincode_timestamp = int(os.path.getmtime(os.path.dirname(__file__)))
-if moincode_timestamp > %d or request.cfg.cfg_mtime > %d:
-    raise "CacheNeedsUpdate"
+cfg_mtime = getattr(request.cfg, "cfg_mtime", None)
+if moincode_timestamp > %d or cfg_mtime is None or cfg_mtime > %d:
+    raise Exception("CacheNeedsUpdate")
 """ % (waspcode_timestamp, waspcode_timestamp)]
 
 
--- a/MoinMoin/wikidicts.py	Mon Jul 24 22:32:30 2006 +0200
+++ b/MoinMoin/wikidicts.py	Mon Jul 24 22:35:28 2006 +0200
@@ -26,7 +26,7 @@
 
 # Version of the internal data structure which is pickled
 # Please increment if you have changed the structure
-DICTS_PICKLE_VERSION = 4
+DICTS_PICKLE_VERSION = 5
 
 
 class DictBase:
@@ -44,13 +44,18 @@
         """
         self.name = name
 
-        self.regex = re.compile(self.regex, re.MULTILINE | re.UNICODE)
+        self.initRegex()
 
         # Get text from page named 'name'
         p = Page.Page(request, name)
         text = p.get_raw_body()
         self.initFromText(text)
 
+    def initRegex(cls):
+        """ Make it a class attribute to avoid it being pickled. """
+        cls.regex = re.compile(cls.regex, re.MULTILINE | re.UNICODE)
+    initRegex = classmethod(initRegex)
+
     def initFromText(self, text):
         raise NotImplementedError('sub classes should override this')
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/wikisync.py	Mon Jul 24 22:35:28 2006 +0200
@@ -0,0 +1,99 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - Wiki Synchronisation
+
+    @copyright: 2006 by MoinMoin:AlexanderSchremmer
+    @license: GNU GPL, see COPYING for details.
+"""
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+
+class Tag(object):
+    """ This class is used to store information about merging state. """
+    
+    def __init__(self, remote_wiki, remote_rev, current_rev):
+        """ Creates a new Tag.
+        
+        @param remote_wiki: The identifier of the remote wiki.
+        @param remote_rev: The revision number on the remote end.
+        @param current_rev: The related local revision.
+        """
+        self.remote_wiki = remote_wiki
+        self.remote_rev = remote_rev
+        self.current_rev = current_rev
+
+    def __repr__(self):
+        return u"<Tag remote_wiki=%r remote_rev=%r current_rev=%r>" % (self.remote_wiki, self.remote_rev, self.current_rev)
+
+
+class AbstractTagStore(object):
+    """ This class is an abstract base class that shows how to implement classes
+        that manage the storage of tags. """
+
+    def __init__(self, page):
+        """ Subclasses don't need to call this method. It is just here to enforce
+        that subclasses accept a page argument at least. """
+        pass
+
+    def add(self, **kwargs):
+        """ Adds a Tag object to the current TagStore. """
+        print "Got tag for page %r: %r" % (self.page, kwargs)
+        return NotImplemented
+
+    def get_all_tags(self):
+        """ Returns a list of all Tag objects associated to this page. """
+        return NotImplemented
+    
+    def clear(self):
+        """ Removes all tags. """
+        return NotImplemented
+
+
+class PickleTagStore(AbstractTagStore):
+    """ This class manages the storage of tags in pickle files. """
+
+    def __init__(self, page):
+        """ Creates a new TagStore that uses pickle files.
+        
+        @param page: a Page object where the tags should be related to
+        """
+        
+        self.page = page
+        self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1)
+        self.load()
+
+    def load(self):
+        """ Loads the tags from the data file. """
+        try:
+            datafile = file(self.filename, "rb")
+        except IOError:
+            self.tags = []
+        else:
+            self.tags = pickle.load(datafile)
+            datafile.close()
+    
+    def commit(self):
+        """ Writes the memory contents to the data file. """
+        datafile = file(self.filename, "wb")
+        pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL)
+        datafile.close()
+
+    # public methods ---------------------------------------------------
+    def add(self, **kwargs):
+        self.tags.append(Tag(**kwargs))
+        self.commit()
+    
+    def get_all_tags(self):
+        return self.tags
+
+    def clear(self):
+        self.tags = []
+        self.commit()
+
+# currently we just have one implementation, so we do not need
+# a factory method
+TagStore = PickleTagStore
\ No newline at end of file
--- a/MoinMoin/xmlrpc/__init__.py	Mon Jul 24 22:32:30 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Mon Jul 24 22:35:28 2006 +0200
@@ -219,10 +219,39 @@
         """ Get all pages readable by current user
 
         @rtype: list
-        @return: a list of all pages. The result is a list of utf-8 strings.
+        @return: a list of all pages.
         """
-        pagelist = self.request.rootpage.getPageList()
-        return map(self._outstr, pagelist)
+
+        # the official WikiRPC interface is implemented by the extended method
+        # as well
+        return self.xmlrpc_getAllPagesEx()
+
+
+    def xmlrpc_getAllPagesEx(self, opts=None):
+        """ Get all pages readable by current user. Not a WikiRPC method.
+
+        @param opts: dictionary that can contain the following arguments:
+                include_system:: set it to false if you do not want to see system pages
+                include_revno:: set it to True if you want to have lists with [pagename, revno]
+                include_deleted:: set it to True if you want to include deleted pages
+        @rtype: list
+        @return: a list of all pages.
+        """
+        options = {"include_system": True, "include_revno": False, "include_deleted": False}
+        if opts is not None:
+            options.update(opts)
+
+        if not options["include_system"]:
+            filter = lambda name: not wikiutil.isSystemPage(self.request, name)
+        else:
+            filter = lambda name: True
+
+        pagelist = self.request.rootpage.getPageList(filter=filter, exists=not options["include_deleted"])
+        
+        if options['include_revno']:
+            return [[self._outstr(x), Page(self.request, x).get_real_rev()] for x in pagelist]
+        else:
+            return [self._outstr(x) for x in pagelist]
 
     def xmlrpc_getRecentChanges(self, date):
         """ Get RecentChanges since date
@@ -498,6 +527,7 @@
         from MoinMoin import version
         return (version.project, version.release, version.revision)
 
+
     # authorization methods
 
     def xmlrpc_getAuthToken(self, username, password, *args):
@@ -519,6 +549,9 @@
         else:
             return xmlrpclib.Fault("INVALID", "Invalid token.")
 
+
+    # methods for wiki synchronization
+
     def xmlrpc_getDiff(self, pagename, from_rev, to_rev):
         """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. """
         from MoinMoin.util.bdiff import textdiff, compress
@@ -594,9 +627,13 @@
             @param interwiki_name: Used to build the interwiki tag.
         """
         from MoinMoin.util.bdiff import decompress, patch
+        from MoinMoin.wikisync import TagStore
+        LASTREV_INVALID = xmlrpclib.Fault("LASTREV_INVALID", "The page was changed")
 
         pagename = self._instr(pagename)
 
+        comment = u"Remote - %r" % interwiki_name
+        
         # User may read page?
         if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename):
             return self.notAllowedFault()
@@ -604,10 +641,10 @@
         # XXX add locking here!
 
         # current version of the page
-        currentpage = Page(self.request, pagename)
+        currentpage = PageEditor(self.request, pagename, do_editor_backup=0)
 
         if currentpage.get_real_rev() != last_remote_rev:
-            return xmlrpclib.Fault("LASTREV_INVALID", "The page was changed")
+            return LASTREV_INVALID
 
         if not currentpage.exists() and diff is None:
             return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.")
@@ -619,11 +656,19 @@
         newcontents = patch(basepage.get_raw_body_str(), decompress(str(diff)))
 
         # write page
-        # XXX ...
+        try:
+            currentpage.saveText(newcontents.encode("utf-8"), last_remote_rev, comment=comment)
+        except PageEditor.EditConflict:
+            return LASTREV_INVALID
 
-        # XXX add a tag (interwiki_name, local_rev, current rev) to the page
-        # XXX return current rev
-        # XXX finished
+        current_rev = currentpage.get_real_rev()
+        
+        tags = TagStore(currentpage)
+        tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev)
+
+        # XXX unlock page
+
+        return current_rev
 
 
     # XXX BEGIN WARNING XXX
--- a/docs/CHANGES.aschremmer	Mon Jul 24 22:32:30 2006 +0200
+++ b/docs/CHANGES.aschremmer	Mon Jul 24 22:35:28 2006 +0200
@@ -2,16 +2,17 @@
 ===============================
 
   Known main issues:
-    * ...
+    * How will we store tags?
+    * How to handle renames/deletes?
+    * How to handle colliding/empty interwiki names?
 
   ToDo:
     * Implement actual synchronisation.
     * Implement a cross-site authentication system, i.e. mainly an
       identity storage.
     * Clean up trailing whitespace.
-    * Add page locking.
+    * Add page locking, i.e. use the one in the new storage layer.
     * How about using unique IDs that just derive from the interwikiname?
-    * How to handle renames?
 
   New Features:
     * XMLRPC method to return the Moin version
@@ -20,10 +21,16 @@
     * XMLRPC Authentication System
     * Binary Diffing
     * XMLRPC method to get binary diffs
-    * 
+    * XMLRPC method to merge remote changes locally
+    * XMLRPC method to get the interwiki name
+    * TagStore/PickleTagStore class
+    * XMLRPC method to get the pagelist in a special way (revnos,
+      no system pages etc.)
 
   Bugfixes (only stuff that is buggy in moin/1.6 main branch):
-    * Conflict resolution fixes.
+    * Conflict resolution fixes. (merged into main)
+    * Python 2.5 compatibility fixes in the Page caching logic
+    * sre pickle issues in the wikidicts code
 
   Other Changes:
     * Refactored conflict resolution and XMLRPC code.
@@ -58,6 +65,12 @@
 Week 28: Debian-Edu Developer Camp. Implemented getDiff XMLRPC method, added preliminary SyncPages action,
          added interwikiName XMLRPC method, added mergeChanges XMLRPC method. Started analysis of the moinupdate
          script written by Stefan Merten.
+Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently
+         using pickle-based storage. Added getAllPagesEx XMLRPC method.
+
+2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-20: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 
 Time plan
 =========