changeset 1308:9c6c07e63832

merged main
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Thu, 17 Aug 2006 12:12:31 +0200
parents 6bc162aba29d (current diff) 05602636efa7 (diff)
children 238c43cb8877
diffstat 5 files changed, 202 insertions(+), 106 deletions(-)
--- a/MoinMoin/action/SyncPages.py	Thu Aug 17 12:11:01 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Thu Aug 17 12:12:31 2006 +0200
@@ -24,8 +24,8 @@
 from MoinMoin.PageEditor import PageEditor, conflict_markers
 from MoinMoin.Page import Page
 from MoinMoin.wikidicts import Dict, Group
-from MoinMoin.wikisync import (TagStore, UnsupportedWikiException, SyncPage,
-                               MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH)
+from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage
+from MoinMoin.wikisync import MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH, MIMETYPE_MOIN
 from MoinMoin.util.bdiff import decompress, patch, compress, textdiff
 from MoinMoin.util import diff3
 
@@ -133,6 +133,7 @@
     def sync(self, params, local, remote):
         """ This method does the syncronisation work.
             Currently, it handles the case where the pages exist on both sides.
+            One of the major missing parts is rename handling.
             Now there are a few other cases left that have to be implemented:
                 Wiki A    | Wiki B   | Remark
                 ----------+----------+------------------------------
@@ -159,12 +160,15 @@
                 exists    | any case | Try a rename search first, then
                           |          | do a sync without considering tags
                 with tags | with non | to ensure data integrity.
-                          | matching |
-                          | tags     |
+                          | matching | Hmm, how do we detect this
+                          | tags     | case if the non-matching tags are only on the remote side?
                 ----------+----------+-------------------------------
+                exists    | exists   | already handled.
         """
         _ = self.request.getText
         direction = params["direction"]
+        local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
+        remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()])
 
         l_pages = local.get_pages()
         r_pages = remote.get_pages(exclude_non_writable=direction != DOWN)
@@ -176,11 +180,11 @@
 
         m_pages = [elem.add_missing_pagename(local, remote) for elem in SyncPage.merge(l_pages, r_pages)]
 
-        print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages)) # XXX remove
+        self.log_status(self.INFO, "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))) # XXX remove?
 
         if params["pageMatch"]:
             m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
-        print "After filtering: Got %i merges pages" % (len(m_pages), ) # XXX remove
+        self.log_status(self.INFO, "After filtering: Got %i merges pages" % (len(m_pages), )) # XXX remove
 
         on_both_sides = list(SyncPage.iter_local_and_remote(m_pages))
         remote_but_not_local = list(SyncPage.iter_remote_only(m_pages))
@@ -195,7 +199,7 @@
         # XXX handle deleted pages
         for rp in on_both_sides:
             # XXX add locking, acquire read-lock on rp
-            print "Processing %r" % rp
+            #print "Processing %r" % rp
 
             local_pagename = rp.local_name
             current_page = PageEditor(self.request, local_pagename) # YYY direct access
@@ -210,17 +214,31 @@
             matching_tags.sort()
             #print "------ TAGS: " + repr(matching_tags) + repr(tags.tags)
 
-            if not matching_tags:
-                remote_rev = None
-                local_rev = rp.local_rev # merge against the newest version
-                old_contents = ""
-            else:
+            # some default values for non-matching tags
+            normalised_name = None
+            remote_rev = None
+            local_rev = rp.local_rev # merge against the newest version
+            old_contents = ""
+
+            if matching_tags:
                 newest_tag = matching_tags[-1]
-                local_rev = newest_tag.current_rev
-                remote_rev = newest_tag.remote_rev
-                if remote_rev == rp.remote_rev and (direction == DOWN or local_rev == current_rev):
+
+                # handle some cases where we cannot continue for this page
+                if newest_tag.remote_rev == rp.remote_rev and (direction == DOWN or newest_tag.current_rev == current_rev):
                     continue # no changes done, next page
-                old_contents = Page(self.request, local_pagename, rev=local_rev).get_raw_body_str() # YYY direct access
+                if rp.local_mime_type != MIMETYPE_MOIN and not ((newest_tag.remote_rev == rp.remote_rev) ^ (newest_tag.current_rev == current_rev)):
+                    self.log_status(ActionClass.WARN, _("The item %(pagename)s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again.") % {"pagename": rp.name})
+                    continue
+                if rp.local_mime_type != rp.remote_mime_type:
+                    self.log_status(ActionClass.WARN, _("The item %(pagename)s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again.") % {"pagename": rp.name})
+                    continue
+                if newest_tag.normalised_name != rp.name:
+                    self.log_status(ActionClass.WARN, _("The item %(pagename)s was renamed locally. This is not implemented yet. Therefore all syncronisation history is lost for this page.") % {"pagename": rp.name}) # XXX implement renames
+                else:
+                    normalised_name = newest_tag.normalised_name
+                    local_rev = newest_tag.current_rev
+                    remote_rev = newest_tag.remote_rev
+                    old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access
 
             self.log_status(ActionClass.INFO, _("Synchronising page %(pagename)s with remote page %(remotepagename)s ...") % {"pagename": local_pagename, "remotepagename": rp.remote_name})
 
@@ -231,19 +249,25 @@
                 patch_base_contents = old_contents
 
             if remote_rev != rp.remote_rev:
-                diff_result = remote.get_diff(rp.remote_name, remote_rev, None) # XXX might raise ALREADY_CURRENT
+                diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name)
+                if diff_result is None:
+                    self.log_status(ActionClass.ERROR, _("The page %(pagename)s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced.") % {"pagename": rp.remote_name})
+                    continue
                 is_remote_conflict = diff_result["conflict"]
                 assert diff_result["diffversion"] == 1
                 diff = diff_result["diff"]
                 current_remote_rev = diff_result["current"]
             else:
                 current_remote_rev = remote_rev
-                is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8"))
+                if rp.local_mime_type == MIMETYPE_MOIN:
+                    is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8"))
+                else:
+                    is_remote_conflict = NotImplemented
                 diff = None
 
             # do not sync if the conflict is remote and local, or if it is local
             # and the page has never been synchronised
-            if (wikiutil.containsConflictMarker(current_page.get_raw_body())
+            if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body())
                 and (remote_rev is None or is_remote_conflict)):
                 self.log_status(ActionClass.WARN, _("Skipped page %(pagename)s because of a locally or remotely unresolved conflict.") % {"pagename": local_pagename})
                 continue
@@ -252,18 +276,23 @@
                 self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki."))
 
             if diff is None:
-                new_contents = old_contents.decode("utf-8")
+                new_contents = old_contents
             else:
-                new_contents = patch(patch_base_contents, decompress(diff)).decode("utf-8")
+                new_contents = patch(patch_base_contents, decompress(diff))
 
-            # here, the actual merge happens
-            print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body())
-            verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents, current_page.get_raw_body(), 2, *conflict_markers)
+            if rp.local_mime_type == MIMETYPE_MOIN:
+                new_contents_unicode = new_contents.decode("utf-8")
+                # here, the actual merge happens
+                # XXX print "Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents, current_page.get_raw_body())
+                verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers)
+                verynewtext_raw = verynewtext.encode("utf-8")
+            else:
+                if diff is None:
+                    verynewtext_raw = new_contents
+                else:
+                    verynewtext_raw = current_page.get_raw_body_str()
 
-            local_full_iwid = packLine([local.get_iwid(), local.get_interwiki_name()])
-            remote_full_iwid = packLine([remote.get_iwid(), remote.get_interwiki_name()])
-
-            diff = textdiff(new_contents.encode("utf-8"), verynewtext.encode("utf-8"))
+            diff = textdiff(new_contents, verynewtext_raw)
             #print "Diff against %r" % new_contents.encode("utf-8")
 
             comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
@@ -288,7 +317,7 @@
 
             tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=rp.name)
 
-            if not wikiutil.containsConflictMarker(verynewtext):
+            if rp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(verynewtext):
                 self.log_status(ActionClass.INFO, _("Page successfully merged."))
             else:
                 self.log_status(ActionClass.WARN, _("Page merged with conflicts."))
--- a/MoinMoin/wikisync.py	Thu Aug 17 12:11:01 2006 +0200
+++ b/MoinMoin/wikisync.py	Thu Aug 17 12:12:31 2006 +0200
@@ -21,6 +21,7 @@
 from MoinMoin.packages import unpackLine, packLine
 
 
+MIMETYPE_MOIN = "text/wiki"
 # sync directions
 UP, DOWN, BOTH = range(3)
 
@@ -58,6 +59,8 @@
         self.remote_name = remote_name
         assert local_rev or remote_rev
         assert local_name or remote_name
+        self.local_mime_type = MIMETYPE_MOIN   # XXX no usable storage API yet
+        self.remote_mime_type = MIMETYPE_MOIN
 
     def __repr__(self):
         return repr("<Remote Page %r>" % unicode(self))
@@ -107,6 +110,7 @@
             if sp in d:
                 d[sp].remote_rev = sp.remote_rev
                 d[sp].remote_name = sp.remote_name
+                # XXX merge mime type here
             else:
                 d[sp] = sp
         return d.keys()
@@ -206,10 +210,15 @@
         return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
 
     # Public methods
-    def get_diff(self, pagename, from_rev, to_rev):
+    def get_diff(self, pagename, from_rev, to_rev, n_name=None):
         """ Returns the binary diff of the remote page named pagename, given
             from_rev and to_rev. """
-        result = self.connection.getDiff(pagename, from_rev, to_rev)
+        try:
+            result = self.connection.getDiff(pagename, from_rev, to_rev, n_name)
+        except xmlrpclib.Fault, e:
+            if e.faultCode == "INVALID_TAG":
+                return None
+            raise
         result["diff"] = str(result["diff"]) # unmarshal Binary object
         return result
 
@@ -261,6 +270,8 @@
 
     def createSyncPage(self, page_name):
         normalised_name = normalise_pagename(page_name, self.prefix)
         if normalised_name is None:
             return None
+        if not self.request.user.may.write(normalised_name):
+            return None
         return SyncPage(normalised_name, local_rev=Page(self.request, page_name).get_real_rev(), local_name=page_name)
@@ -350,7 +361,11 @@
     def get_all_tags(self):
         """ Returns a list of all Tag objects associated to this page. """
         return NotImplemented
-    
+
+    def get_last_tag(self):
+        """ Returns the newest tag. """
+        return NotImplemented
+
     def clear(self):
         """ Removes all tags. """
         return NotImplemented
@@ -368,51 +383,65 @@
         
         @param page: a Page object where the tags should be related to
         """
-        
+
         self.page = page
         self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1)
         lock_dir = os.path.join(page.getPagePath('cache', use_underlay=0, check_create=1), '__taglock__')
         self.rlock = lock.ReadLock(lock_dir, 60.0)
         self.wlock = lock.WriteLock(lock_dir, 60.0)
-        self.load()
 
-    def load(self):
-        """ Loads the tags from the data file. """
         if not self.rlock.acquire(3.0):
             raise EnvironmentError("Could not lock in PickleTagStore")
         try:
-            try:
-                datafile = file(self.filename, "rb")
-            except IOError:
-                self.tags = []
-            else:
-                self.tags = pickle.load(datafile)
-                datafile.close()
+            self.load()
         finally:
             self.rlock.release()
+
+    def load(self):
+        """ Loads the tags from the data file. """
+        try:
+            datafile = file(self.filename, "rb")
+            self.tags = pickle.load(datafile)
+        except (IOError, EOFError):
+            self.tags = []
+        else:
+            datafile.close()
     
     def commit(self):
         """ Writes the memory contents to the data file. """
+        datafile = file(self.filename, "wb")
+        pickle.dump(self.tags, datafile, pickle.HIGHEST_PROTOCOL)
+        datafile.close()
+
+    # public methods ---------------------------------------------------
+    def add(self, **kwargs):
         if not self.wlock.acquire(3.0):
             raise EnvironmentError("Could not lock in PickleTagStore")
         try:
-            datafile = file(self.filename, "wb")
-            pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL)
-            datafile.close()
+            self.load()
+            self.tags.append(Tag(**kwargs))
+            self.commit()
         finally:
             self.wlock.release()
 
-    # public methods ---------------------------------------------------
-    def add(self, **kwargs):
-        self.tags.append(Tag(**kwargs))
-        self.commit()
-    
     def get_all_tags(self):
-        return self.tags
+        return self.tags[:]
+
+    def get_last_tag(self):
+        temp = self.tags[:]
+        temp.sort()
+        if not temp:
+            return None
+        return temp[-1]
 
     def clear(self):
         self.tags = []
-        self.commit()
+        if not self.wlock.acquire(3.0):
+            raise EnvironmentError("Could not lock in PickleTagStore")
+        try:
+            self.commit()
+        finally:
+            self.wlock.release()
 
     def fetch(self, iwid_full, direction=None):
         iwid_full = unpackLine(iwid_full)
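
The reworked PickleTagStore serialises every mutation as acquire lock, reload, mutate, commit, so two concurrent requests can no longer lose each other's tags. The same discipline in isolation, as a sketch with fcntl file locking standing in for MoinMoin.util.lock; the path and tag value are illustrative:

    import fcntl, os, pickle

    def append_tag(path, tag):
        # open read/write, creating the data file on first use
        if os.path.exists(path):
            f = open(path, "rb+")
        else:
            f = open(path, "wb+")
        try:
            fcntl.flock(f, fcntl.LOCK_EX)     # exclusive, like wlock.acquire()
            try:
                tags = pickle.load(f)         # load(): refresh in-memory state
            except EOFError:                  # brand-new file: no tags yet
                tags = []
            tags.append(tag)
            f.seek(0)
            f.truncate()
            pickle.dump(tags, f, pickle.HIGHEST_PROTOCOL)   # commit()
        finally:
            f.close()                         # closing also releases the flock
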
--- a/MoinMoin/wikiutil.py	Thu Aug 17 12:11:01 2006 +0200
+++ b/MoinMoin/wikiutil.py	Thu Aug 17 12:12:31 2006 +0200
@@ -408,18 +408,23 @@
 
 class MetaDict(dict):
     """ store meta informations as a dict.
-    XXX It is not thread-safe, add locks!
     """
     def __init__(self, metafilename, cache_directory):
         """ create a MetaDict from metafilename """
         dict.__init__(self)
         self.metafilename = metafilename
         self.dirty = False
-        self.loaded = False
         lock_dir = os.path.join(cache_directory, '__metalock__')
         self.rlock = lock.ReadLock(lock_dir, 60.0)
         self.wlock = lock.WriteLock(lock_dir, 60.0)
 
+        if not self.rlock.acquire(3.0):
+            raise EnvironmentError("Could not lock in MetaDict")
+        try:
+            self._get_meta()
+        finally:
+            self.rlock.release()
+
     def _get_meta(self):
         """ get the meta dict from an arbitrary filename.
             does not keep state; performs uncached, direct disk access.
@@ -428,14 +433,9 @@
         """
 
         try:
-            if not self.rlock.acquire(3.0):
-                raise EnvironmentError("Could not lock in MetaDict")
-            try:
-                metafile = codecs.open(self.metafilename, "r", "utf-8")
-                meta = metafile.read() # this is much faster than the file's line-by-line iterator
-                metafile.close()
-            finally:
-                self.rlock.release()
+            metafile = codecs.open(self.metafilename, "r", "utf-8")
+            meta = metafile.read() # this is much faster than the file's line-by-line iterator
+            metafile.close()
         except IOError:
             meta = u''
         for line in meta.splitlines():
@@ -444,7 +444,6 @@
             if key in INTEGER_METAS:
                 value = int(value)
             dict.__setitem__(self, key, value)
-        self.loaded = True
 
     def _put_meta(self):
         """ put the meta dict into an arbitrary filename.
@@ -459,44 +458,37 @@
             meta.append("%s: %s" % (key, value))
         meta = '\r\n'.join(meta)
 
-        if not self.wlock.acquire(5.0):
-            raise EnvironmentError("Could not lock in MetaDict")
-        try:
-            metafile = codecs.open(self.metafilename, "w", "utf-8")
-            metafile.write(meta)
-            metafile.close()
-        finally:
-            self.wlock.release()
+        metafile = codecs.open(self.metafilename, "w", "utf-8")
+        metafile.write(meta)
+        metafile.close()
         filesys.chmod(self.metafilename, 0666 & config.umask)
         self.dirty = False
 
     def sync(self, mtime_usecs=None):
-        """ sync the in-memory dict to the persistent store (if dirty) """
-        if self.dirty:
-            if not mtime_usecs is None:
-                self.__setitem__('mtime', str(mtime_usecs))
-            self._put_meta()
+        """ No-Op except for that parameter """
+        if not mtime_usecs is None:
+            self.__setitem__('mtime', str(mtime_usecs))
+        # otherwise no-op
 
     def __getitem__(self, key):
-        try:
-            return dict.__getitem__(self, key)
-        except KeyError:
-            if not self.loaded:
-                self._get_meta() # lazy loading of metadata
-                return dict.__getitem__(self, key)
-            else:
-                raise
+        """ We don't care for cache coherency here. """
+        return dict.__getitem__(self, key)
 
     def __setitem__(self, key, value):
-        """ Sets a dictionary entry. You actually have to call sync to write it
-            to the persistent store. """
+        """ Sets a dictionary entry. """
+        if not self.wlock.acquire(5.0):
+            raise EnvironmentError("Could not lock in MetaDict")
         try:
-            oldvalue = dict.__getitem__(self, key)
-        except KeyError:
-            oldvalue = None
-        if value != oldvalue:
-            dict.__setitem__(self, key, value)
-            self.dirty = True
+            self._get_meta() # refresh cache
+            try:
+                oldvalue = dict.__getitem__(self, key)
+            except KeyError:
+                oldvalue = None
+            if value != oldvalue:
+                dict.__setitem__(self, key, value)
+                self._put_meta() # sync cache
+        finally:
+            self.wlock.release()
 
 
 #############################################################################
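
With these changes MetaDict becomes a write-through cache: reads are served from memory, while every write re-reads the file under the write lock, applies the change, and persists it immediately, closing the lost-update window the old dirty/sync scheme left open. The skeleton of that pattern, with threading.Lock standing in for the on-disk WriteLock and load/store callbacks standing in for _get_meta/_put_meta (all names here are illustrative):

    import threading

    class WriteThroughDict(dict):
        def __init__(self, load, store):
            dict.__init__(self)
            self._load = load               # reads the backing store into a dict
            self._store = store             # persists a plain dict
            self._lock = threading.Lock()
            self.update(self._load())       # prime the cache once

        def __setitem__(self, key, value):
            self._lock.acquire()
            try:
                self.update(self._load())   # refresh: pick up other writers first
                if self.get(key) != value:  # only touch the disk on a real change
                    dict.__setitem__(self, key, value)
                    self._store(dict(self)) # write through immediately
            finally:
                self._lock.release()
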
--- a/MoinMoin/xmlrpc/__init__.py	Thu Aug 17 12:11:01 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Thu Aug 17 12:12:31 2006 +0200
@@ -577,11 +577,46 @@
 
     # methods for wiki synchronization
 
-    def xmlrpc_getDiff(self, pagename, from_rev, to_rev):
-        """ Gets the binary difference between two page revisions. See MoinMoin:WikiSyncronisation. """
+    def xmlrpc_getDiff(self, pagename, from_rev, to_rev, n_name=None):
+        """ Gets the binary difference between two page revisions.
+
+            @param pagename: unicode string specifying the page name
+
+            @param from_rev: integer specifying the source revision. May be None to
+            refer to a virtual empty revision which leads to a diff
+            containing the whole page.
+
+            @param to_rev: integer specifying the target revision. May be None to
+            refer to the current revision. If the current revision is the same
+            as from_rev, there will be a special error condition "ALREADY_CURRENT".
+
+            @param n_name: do a tag check verifying that n_name was the normalised
+            name of the last tag
+
+            If both from_rev and to_rev are None, this function acts similarly to getPage, i.e. it will diff("", currentRev).
+
+            @return: a dict with the following fields:
+            * status (not a field, implicit, returned as Fault if not SUCCESS):
+             * "SUCCESS" - if the diff could be retrieved successfully
+             * "NOT_EXIST" - item does not exist
+             * "FROMREV_INVALID" - the source revision is invalid
+             * "TOREV_INVALID" - the target revision is invalid
+             * "INTERNAL_ERROR" - there was an internal error
+             * "INVALID_TAG" - the last tag does not match the supplied normalised name
+             * "ALREADY_CURRENT" - this not merely an error condition. It rather means that
+             there is no new revision to diff against which is a good thing while
+             synchronisation.
+            * current: the revision number of the current revision (not the one which was diff'ed against)
+            * diff: Binary object that transports a zlib-compressed binary diff (see bdiff.py, taken from Mercurial)
+            * conflict: whether there is currently a conflict on the page
+
+        """
         from MoinMoin.util.bdiff import textdiff, compress
+        from MoinMoin.wikisync import TagStore
 
         pagename = self._instr(pagename)
+        if n_name is not None:
+            n_name = self._instr(n_name)
 
         # User may read page?
         if not self.request.user.may.read(pagename):
@@ -627,6 +662,12 @@
         if oldcontents() and oldpage.get_real_rev() == newpage.get_real_rev():
             return xmlrpclib.Fault("ALREADY_CURRENT", "There are no changes.")
 
+        if n_name is not None:
+            tags = TagStore(newpage)
+            last_tag = tags.get_last_tag()
+            if last_tag is not None and last_tag.normalised_name != n_name:
+                return xmlrpclib.Fault("INVALID_TAG", "The used tag is incorrect because the normalised name does not match.")
+
         newcontents = newcontents()
         conflict = wikiutil.containsConflictMarker(newcontents)
         diffblob = xmlrpclib.Binary(compress(textdiff(oldcontents(), newcontents)))
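
Seen from the client, the new n_name argument and the INVALID_TAG fault of getDiff look like this; a sketch mirroring how MoinRemoteWiki.get_diff calls it above, with a made-up wiki URL, page name, and revision:

    import xmlrpclib

    wiki = xmlrpclib.ServerProxy("http://example.com/wiki?action=xmlrpc2",
                                 allow_none=True)
    try:
        result = wiki.getDiff("FrontPage", 41, None, "FrontPage")
    except xmlrpclib.Fault, e:
        if e.faultCode == "INVALID_TAG":        # last tag was for another name
            result = None                       # e.g. treat as a rename, log it
        elif e.faultCode == "ALREADY_CURRENT":  # nothing new to pull
            result = None
        else:
            raise
    if result is not None:
        diff_blob = str(result["diff"])         # unmarshal the Binary object
        current_rev = result["current"]         # revision the diff leads to
        has_conflict = result["conflict"]       # conflict markers on the page?
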
--- a/docs/CHANGES.aschremmer	Thu Aug 17 12:11:01 2006 +0200
+++ b/docs/CHANGES.aschremmer	Thu Aug 17 12:12:31 2006 +0200
@@ -7,20 +7,22 @@
       (currently done in Pickle files)
 
   ToDo:
-    * Tags should store the page name to recognise renaming scenarios.
     * Implement all synchronisation cases (all 3 page sets).
-    * Test with prefixes
     * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
     * Attach the status information to the job page.
+    * Check what needs to be documented on MoinMaster.
+
     * Show tags in an action=info view?
-    * Implement a cross-site authentication system, i.e. mainly an
-      identity storage. (does OpenID make sense?)
-    * Clean up trailing whitespace.
-    * Add page locking, i.e. use the one in the new storage layer.
-    * Check what needs to be documented on MoinMaster.
+
+    * Test with prefixes
     * Search for XXX
     * Maybe refactor YYY into MoinLocalWiki
     * Reduce the amount of "very" in the code
+    * Clean up trailing whitespace.
+
+    * Implement a cross-site authentication system, i.e. mainly an
+      identity storage. (does OpenID make sense?)
+    * Add page locking, i.e. use the one in the new storage layer.
     * Do older tags of one wiki site have to be stored as well? Why don't we
       keep just one tag?
     * Put author names into the comment field, transmit mimetypes.
@@ -54,6 +56,7 @@
     * Fixed the MetaDict code to use locks.
     * Fixed a bug in request.py that prevented showing a traceback if there was a fault
       after the first headers were sent.
+    * Fixed severe race conditions in the meta dict and the sync tags code.
 
   Other Changes:
     * Refactored conflict resolution and XMLRPC code.
@@ -91,8 +94,9 @@
 Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently
          using pickle-based storage. Added getAllPagesEx XMLRPC method.
 Week 30: Implemented IWID support, added function to generate random strings. Added support
-         for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. Added handling of
-         various options and detection of anonymous wikis to the SyncPages action.
+         for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and
+         the MetaDict classes. Added handling of various options and detection of anonymous
+         wikis to the SyncPages action.
 Week 31: Load the IWID and the meta dict lazily. Reworked RemotePage/SyncPage,
          fixed option handling again, refined semantics of options, introduced
          direction option, replaced "localMatch"/"remoteMatch" by "pageMatch".
@@ -105,8 +109,9 @@
          conflicts. Fixed hard to track down bugs in SyncPages. Store full IWIDs and match by
          either of both components when searching for tags. Ignore underlay pages. Filter the remote page list by
          the prefix and the pageList on the remote side. Finished the direction==DOWN mode.
-         Started designing the solutions for the other sync cases. Store and transmit the
-         normalised name.
+Week 33: Started designing the solutions for the other sync cases. Store and transmit the
+         normalised name. Implemented preliminary mime type support; only transmission of the mime type
+         and the new storage API are still missing. Changed getDiff interface. Detect renamed pages (and choke :).
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress