changeset 1102:71a996462a09

merge main, remove my solution to the header stuff
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Sun, 30 Jul 2006 22:24:16 +0200
parents 0a3a7ca70e48 (current diff) a60c236bf0ac (diff)
children fe874e9e72c7
files MoinMoin/request/__init__.py
diffstat 12 files changed, 232 insertions(+), 83 deletions(-) [+]
line wrap: on
line diff
--- a/MoinMoin/action/SyncPages.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Sun Jul 30 22:24:16 2006 +0200
@@ -22,15 +22,17 @@
 
 
 from MoinMoin import wikiutil, config, user
-from MoinMoin.packages import unpackLine
+from MoinMoin.packages import unpackLine, packLine
 from MoinMoin.PageEditor import PageEditor
 from MoinMoin.Page import Page
-from MoinMoin.wikidicts import Dict
+from MoinMoin.wikidicts import Dict, Group
 
 
 class ActionStatus(Exception): pass
 
+class UnsupportedWikiException(Exception): pass
 
+# Move these classes to MoinMoin.wikisync
 class RemotePage(object):
     """ This class represents a page in (another) wiki. """
     def __init__(self, name, revno):
@@ -38,11 +40,23 @@
         self.revno = revno
 
     def __repr__(self):
-        return repr(u"%s<%i>" % (self.name, self.revno))
+        return repr("<Remote Page %r>" % unicode(self))
+
+    def __unicode__(self):
+        return u"%s<%i>" % (self.name, self.revno)
 
     def __lt__(self, other):
         return self.name < other.name
 
+    def __eq__(self, other):
+        if not isinstance(other, RemotePage):
+            return False
+        return self.name == other.name
+
+    def filter(cls, rp_list, regex):
+        return [x for x in rp_list if regex.match(x.name)]
+    filter = classmethod(filter)
+
 
 class RemoteWiki(object):
     """ This class should be the base for all implementations of remote wiki
@@ -65,18 +79,33 @@
     """ Used for MoinMoin wikis reachable via XMLRPC. """
     def __init__(self, request, interwikiname):
         self.request = request
+        _ = self.request.getText
         wikitag, wikiurl, wikitail, wikitag_bad = wikiutil.resolve_wiki(self.request, '%s:""' % (interwikiname, ))
         self.wiki_url = wikiutil.mapURL(self.request, wikiurl)
         self.valid = not wikitag_bad
         self.xmlrpc_url = self.wiki_url + "?action=xmlrpc2"
+        if not self.valid:
+            self.connection = None
+            return
         self.connection = self.createConnection()
-        # XXX add version and interwiki name checking!
+        version = self.connection.getMoinVersion()
+        if not isinstance(version, (tuple, list)):
+            raise UnsupportedWikiException(_("The remote version of MoinMoin is too old; at least version 1.6 is required."))
+        remote_interwikiname = self.getInterwikiName()
+        remote_iwid = self.connection.interwikiName()[1]
+        self.is_anonymous = remote_interwikiname is None
+        if not self.is_anonymous and interwikiname != remote_interwikiname:
+            raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)"
+                                             " internally than you specified (%(localname)s).") % {
+                "remotename": remote_interwikiname, "localname": interwikiname})
+
+        if self.is_anonymous:
+            self.iwid_full = packLine([remote_iwid])
+        else:
+            self.iwid_full = packLine([remote_iwid, interwikiname])
 
     def createConnection(self):
-        if self.valid:
-            return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True)
-        else:
-            return None
+        return xmlrpclib.ServerProxy(self.xmlrpc_url, allow_none=True, verbose=True)
 
     # Methods implementing the RemoteWiki interface
     def getInterwikiName(self):
@@ -95,13 +124,21 @@
     def __init__(self, request):
         self.request = request
 
+    def getGroupItems(self, group_list):
+        pages = []
+        for group_pagename in group_list:
+            pages.extend(Group(self.request, group_pagename).members())
+        return [self.createRemotePage(x) for x in pages]
+
+    def createRemotePage(self, page_name):
+        return RemotePage(page_name, Page(self.request, page_name).get_real_rev())
+
     # Methods implementing the RemoteWiki interface
     def getInterwikiName(self):
         return self.request.cfg.interwikiname
 
     def getPages(self):
-        l_pages = [[x, Page(self.request, x).get_real_rev()] for x in self.request.rootpage.getPageList(exists=0)]
-        return [RemotePage(unicode(name), revno) for name, revno in l_pages]
+        return [self.createRemotePage(x) for x in self.request.rootpage.getPageList(exists=0)]
 
     def __repr__(self):
         return "<MoinLocalWiki>"
@@ -139,7 +176,8 @@
 
         # merge the pageList case into the remoteMatch case
         if params["pageList"] is not None:
-            params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name) for name in params["pageList"]])
+            params["localMatch"] = params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name)
+                                                                      for name in params["pageList"]])
 
         if params["localMatch"] is not None:
             params["localMatch"] = re.compile(params["localMatch"], re.U)
@@ -166,8 +204,11 @@
             if not params["remoteWiki"]:
                 raise ActionStatus(_("Incorrect parameters. Please supply at least the ''remoteWiki'' parameter."))
 
-            remote = MoinRemoteWiki(self.request, params["remoteWiki"])
             local = MoinLocalWiki(self.request)
+            try:
+                remote = MoinRemoteWiki(self.request, params["remoteWiki"])
+            except UnsupportedWikiException, (msg, ):
+                raise ActionStatus(msg)
 
             if not remote.valid:
                 raise ActionStatus(_("The ''remoteWiki'' is unknown."))
@@ -183,10 +224,32 @@
         
         r_pages = remote.getPages()
         l_pages = local.getPages()
+        print "Got %i local, %i remote pages" % (len(l_pages), len(r_pages))
+        if params["localMatch"]:
+            l_pages = RemotePage.filter(l_pages, params["localMatch"])
+        if params["remoteMatch"]:
+            print "Filtering remote pages using regex %r" % params["remoteMatch"].pattern
+            r_pages = RemotePage.filter(r_pages, params["remoteMatch"])
+        print "After filtering: Got %i local, %i remote pages" % (len(l_pages), len(r_pages))
 
+        if params["groupList"]:
+            pages_from_groupList = local.getGroupItems(params["groupList"])
+            if not params["localMatch"]:
+                l_pages = pages_from_groupList
+            else:
+                l_pages += pages_from_groupList
+
+        l_pages = set(l_pages)
+        r_pages = set(r_pages)
+        
+        # XXX this is not correct if matching is active
+        remote_but_not_local = r_pages - l_pages
+        local_but_not_remote = l_pages - r_pages
+        
         # some initial test code
-        r_new_pages = u",".join(set([repr(x) for x in r_pages]) - set([repr(x) for x in l_pages]))
-        raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages)
+        r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local])
+        l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote])
+        raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages + "<br>These pages are in the local wiki, but not in the remote one: " + l_new_pages)
 
 
 def execute(pagename, request):
--- a/MoinMoin/config/multiconfig.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/config/multiconfig.py	Sun Jul 30 22:24:16 2006 +0200
@@ -12,8 +12,9 @@
 import sys
 import time
 
-from MoinMoin import error, util
+from MoinMoin import error, util, wikiutil
 import MoinMoin.auth as authmodule
+from MoinMoin.packages import packLine
 
 _url_re_cache = None
 _farmconfig_mtime = None
@@ -552,36 +553,30 @@
         # check if mail is possible and set flag:
         self.mail_enabled = (self.mail_smarthost is not None or self.mail_sendmail is not None) and self.mail_from
         
+        self.meta_dict = wikiutil.MetaDict(os.path.join(data_dir, 'meta'))
+
         # interwiki ID processing
         self.load_IWID()
 
     def load_IWID(self):
         """ Loads the InterWikiID of this instance. It is used to identify the instance
             globally.
-            The data file can be found in data/IWID
             The IWID is available as cfg.iwid
             The full IWID containing the interwiki name is available as cfg.iwid_full
         """
-        iwid_path = os.path.join(self.data_dir, "IWID")
 
         try:
-            iwid_file = file(iwid_path, "rb")
-            iwid = iwid_file.readline().strip()
-            iwid_file.close()
-        except IOError:
-            iwid = None
-
-        if iwid is None:
+            iwid = self.meta_dict['IWID']
+        except KeyError:
             iwid = util.random_string(16).encode("hex") + "-" + str(int(time.time()))
-            iwid_file = file(iwid_path, "wb")
-            iwid_file.write(iwid)
-            iwid_file.close()
+            self.meta_dict['IWID'] = iwid
+            self.meta_dict.sync()
 
         self.iwid = iwid
         if self.interwikiname is not None:
-            self.iwid_full = iwid + ":" + self.interwikiname
+            self.iwid_full = packLine([iwid, self.interwikiname])
         else:
-            self.iwid_full = iwid
+            self.iwid_full = packLine([iwid])
 
     def _config_check(self):
         """ Check namespace and warn about unknown names
--- a/MoinMoin/parser/text_rst.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/parser/text_rst.py	Sun Jul 30 22:24:16 2006 +0200
@@ -386,7 +386,7 @@
                 if refuri.find('#') != -1:
                     pagename, anchor = refuri.split('#', 1)
                     anchor = '#' + anchor
-                page = MoinMoin.Page.Page(self.request, pagename)
+                page = Page(self.request, pagename)
                 node['refuri'] = page.url(self.request) + anchor
                 if not page.exists():
                     node['classes'].append('nonexistent')
--- a/MoinMoin/request/__init__.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/request/__init__.py	Sun Jul 30 22:24:16 2006 +0200
@@ -17,7 +17,11 @@
 
 class MoinMoinFinish(Exception):
     """ Raised to jump directly to end of run() function, where finish is called """
-    pass
+
+
+class HeadersAlreadySentException(Exception):
+    """ Is raised if the headers were already sent when emit_http_headers is called."""
+
 
 # Timing ---------------------------------------------------------------
 
@@ -1122,7 +1126,7 @@
         """
         self.emit_http_headers(more_headers)
 
-    def emit_http_headers(self, more_headers=[], dont_raise=False):
+    def emit_http_headers(self, more_headers=[]):
         """ emit http headers after some preprocessing / checking
 
             Makes sure we only emit headers once. If dont_raise is True,
@@ -1146,8 +1150,8 @@
         # Send headers only once
         sent_headers = getattr(self, 'sent_headers', 0)
         self.sent_headers = sent_headers + 1
-        if sent_headers and not dont_raise:
-            raise error.InternalError("emit_http_headers called multiple times(%d)! Headers: %r" % (sent_headers, all_headers))
+        if sent_headers:
+            raise HeadersAlreadySentException("emit_http_headers called multiple (%d) times! Headers: %r" % (sent_headers, all_headers))
         #else:
         #    self.log("Notice: emit_http_headers called first time. Headers: %r" % all_headers)
 
@@ -1222,7 +1226,9 @@
         @param err: Exception instance or subclass.
         """
         self.failed = 1 # save state for self.run()            
-        self.emit_http_headers(['Status: 500 MoinMoin Internal Error'], dont_raise=True)
+        # we should not generate the headers two times
+        if not getattr(self, 'sent_headers', 0):
+            self.emit_http_headers(['Status: 500 MoinMoin Internal Error'])
         self.log('%s: %s' % (err.__class__.__name__, str(err)))
         from MoinMoin import failure
         failure.handle(self)
--- a/MoinMoin/support/cgitb.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/support/cgitb.py	Sun Jul 30 22:24:16 2006 +0200
@@ -70,6 +70,11 @@
 __UNDEF__ = [] # a special sentinel object
 
 
+class HiddenObject:
+    def __repr__(self):
+        return "<HIDDEN>"
+HiddenObject = HiddenObject()
+
 class HTMLFormatter:
     """ Minimal html formatter """
     
@@ -295,7 +300,10 @@
             if ttype == tokenize.NAME and token not in keyword.kwlist:
                 if lasttoken == '.':
                     if parent is not __UNDEF__:
-                        value = getattr(parent, token, __UNDEF__)
+                        if self.unsafe_name(token):
+                            value = HiddenObject
+                        else:
+                            value = getattr(parent, token, __UNDEF__)
                         vars.append((prefix + token, prefix, value))
                 else:
                     where, value = self.lookup(token)
@@ -324,8 +332,12 @@
                 value = builtins.get(name, __UNDEF__)
             else:
                 value = getattr(builtins, name, __UNDEF__)
+        if self.unsafe_name(name):
+            value = HiddenObject
         return scope, value
 
+    def unsafe_name(self, name):
+        return name in self.frame.f_globals.get("unsafe_names", ())
 
 class View:
     """ Traceback view """
--- a/MoinMoin/support/thfcgi.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/support/thfcgi.py	Sun Jul 30 22:24:16 2006 +0200
@@ -327,17 +327,18 @@
         self.have_finished = 1
 
         # stderr
-        self.err.reset()
-        rec = Record()
-        rec.rec_type = FCGI_STDERR
-        rec.req_id = self.req_id
-        data = self.err.read()
-        while data:
-            chunk, data = self.getNextChunk(data)
-            rec.content = chunk
-            rec.writeRecord(self.conn)
-        rec.content = ""
-        rec.writeRecord(self.conn)      # Terminate stream
+        if self.err.tell(): # just send err record if there is data on the err stream
+            self.err.reset()
+            rec = Record()
+            rec.rec_type = FCGI_STDERR
+            rec.req_id = self.req_id
+            data = self.err.read()
+            while data:
+                chunk, data = self.getNextChunk(data)
+                rec.content = chunk
+                rec.writeRecord(self.conn)
+            rec.content = ""
+            rec.writeRecord(self.conn)      # Terminate stream
 
         # stdout
         self.out.reset()
--- a/MoinMoin/user.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/user.py	Sun Jul 30 22:24:16 2006 +0200
@@ -6,6 +6,9 @@
     @license: GNU GPL, see COPYING for details.
 """
 
+# add names here to hide them in the cgitb traceback
+unsafe_names = ("id", "key", "val", "user_data", "enc_password")
+
 import os, time, sha, codecs
 
 try:
@@ -289,9 +292,9 @@
             self.language = 'en'
 
     def __repr__(self):
-        return "<%s.%s at 0x%x name:%r id:%s valid:%r>" % (
+        return "<%s.%s at 0x%x name:%r valid:%r>" % (
             self.__class__.__module__, self.__class__.__name__,
-            id(self), self.name, self.id, self.valid)
+            id(self), self.name, self.valid)
 
     def make_id(self):
         """ make a new unique user id """
--- a/MoinMoin/util/lock.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/util/lock.py	Sun Jul 30 22:24:16 2006 +0200
@@ -11,9 +11,9 @@
 
 # Temporary debugging aid, to be replaced with system wide debuging
 # in release 3000.
-import sys
-def log(msg):
-    sys.stderr.write('[%s] lock: %s' % (time.asctime(), msg))
+#import sys
+#def log(msg):
+#    sys.stderr.write('[%s] lock: %s' % (time.asctime(), msg))
 
 
 class Timer:
--- a/MoinMoin/wikisync.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/wikisync.py	Sun Jul 30 22:24:16 2006 +0200
@@ -6,11 +6,15 @@
     @license: GNU GPL, see COPYING for details.
 """
 
+import os
+
 try:
     import cPickle as pickle
 except ImportError:
     import pickle
 
+from MoinMoin.util import lock
+
 
 class Tag(object):
     """ This class is used to store information about merging state. """
@@ -64,23 +68,36 @@
         
         self.page = page
         self.filename = page.getPagePath('synctags', use_underlay=0, check_create=1, isfile=1)
+        lock_dir = os.path.join(page.getPagePath('cache', use_underlay=0, check_create=1), '__taglock__')
+        self.rlock = lock.ReadLock(lock_dir, 60.0)
+        self.wlock = lock.WriteLock(lock_dir, 60.0)
         self.load()
 
     def load(self):
         """ Loads the tags from the data file. """
+        if not self.rlock.acquire(3.0):
+            raise EnvironmentError("Could not lock in PickleTagStore")
         try:
-            datafile = file(self.filename, "rb")
-        except IOError:
-            self.tags = []
-        else:
-            self.tags = pickle.load(datafile)
-            datafile.close()
+            try:
+                datafile = file(self.filename, "rb")
+            except IOError:
+                self.tags = []
+            else:
+                self.tags = pickle.load(datafile)
+                datafile.close()
+        finally:
+            self.rlock.release()
     
     def commit(self):
         """ Writes the memory contents to the data file. """
-        datafile = file(self.filename, "wb")
-        pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL)
-        datafile.close()
+        if not self.wlock.acquire(3.0):
+            raise EnvironmentError("Could not lock in PickleTagStore")
+        try:
+            datafile = file(self.filename, "wb")
+            pickle.dump(self.tags, datafile, protocol=pickle.HIGHEST_PROTOCOL)
+            datafile.close()
+        finally:
+            self.wlock.release()
 
     # public methods ---------------------------------------------------
     def add(self, **kwargs):
--- a/MoinMoin/wikiutil.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/wikiutil.py	Sun Jul 30 22:24:16 2006 +0200
@@ -6,11 +6,16 @@
     @license: GNU GPL, see COPYING for details.
 """
 
-import os, re, urllib, cgi
-import codecs, types
+import cgi
+import codecs
+import os
+import re
+import time
+import types
+import urllib
 
 from MoinMoin import util, version, config
-from MoinMoin.util import pysupport, filesys
+from MoinMoin.util import pysupport, filesys, lock
 
 # Exceptions
 class InvalidFileNameError(Exception):
@@ -403,13 +408,18 @@
                 ]
 
 class MetaDict(dict):
-    """ store meta informations as a dict """
+    """ store meta information as a dict.
+    XXX It is not thread-safe, add locks!
+    """
     def __init__(self, metafilename):
         """ create a MetaDict from metafilename """
         dict.__init__(self)
         self.metafilename = metafilename
         self.dirty = False
         self.loaded = False
+        lock_dir = os.path.normpath(os.path.join(metafilename, '..', 'cache', '__metalock__'))
+        self.rlock = lock.ReadLock(lock_dir, 60.0)
+        self.wlock = lock.WriteLock(lock_dir, 60.0)
 
     def _get_meta(self):
         """ get the meta dict from an arbitrary filename.
@@ -417,11 +427,16 @@
             @param metafilename: the name of the file to read
             @return: dict with all values or {} if empty or error
         """
-        # XXX what does happen if the metafile is being written to in another process?
+
         try:
-            metafile = codecs.open(self.metafilename, "r", "utf-8")
-            meta = metafile.read() # this is much faster than the file's line-by-line iterator
-            metafile.close()
+            if not self.rlock.acquire(3.0):
+                raise EnvironmentError("Could not lock in MetaDict")
+            try:
+                metafile = codecs.open(self.metafilename, "r", "utf-8")
+                meta = metafile.read() # this is much faster than the file's line-by-line iterator
+                metafile.close()
+            finally:
+                self.rlock.release()
         except IOError:
             meta = u''
         for line in meta.splitlines():
@@ -443,16 +458,21 @@
             if key in INTEGER_METAS:
                 value = str(value)
             meta.append("%s: %s" % (key, value))
-        meta = '\n'.join(meta)
-        # XXX what does happen if the metafile is being read or written to in another process?
-        metafile = codecs.open(self.metafilename, "w", "utf-8")
-        metafile.write(meta)
-        metafile.close()
+        meta = '\r\n'.join(meta)
+
+        if not self.wlock.acquire(5.0):
+            raise EnvironmentError("Could not lock in MetaDict")
+        try:
+            metafile = codecs.open(self.metafilename, "w", "utf-8")
+            metafile.write(meta)
+            metafile.close()
+        finally:
+            self.wlock.release()
         filesys.chmod(self.metafilename, 0666 & config.umask)
         self.dirty = False
 
     def sync(self, mtime_usecs=None):
-        """ sync the in-memory dict to disk (if dirty) """
+        """ sync the in-memory dict to the persistent store (if dirty) """
         if self.dirty:
             if not mtime_usecs is None:
                 self.__setitem__('mtime', str(mtime_usecs))
@@ -469,6 +489,8 @@
                 raise
 
     def __setitem__(self, key, value):
+        """ Sets a dictionary entry. You actually have to call sync to write it
+            to the persistent store. """
         try:
             oldvalue = dict.__getitem__(self, key)
         except KeyError:
@@ -483,9 +505,16 @@
 #############################################################################
 def load_wikimap(request):
     """ load interwiki map (once, and only on demand) """
+
+    now = int(time.time())
+
     try:
         _interwiki_list = request.cfg._interwiki_list
+        if request.cfg._interwiki_ts + (3*60) < now: # 3 minutes caching time
+            raise AttributeError # refresh cache
     except AttributeError:
+        from MoinMoin.Page import Page
+
         _interwiki_list = {}
         lines = []
 
@@ -493,7 +522,7 @@
         # precedence over the shared one, and is thus read AFTER
         # the shared one
         intermap_files = request.cfg.shared_intermap
-        if not isinstance(intermap_files, type([])):
+        if not isinstance(intermap_files, list):
             intermap_files = [intermap_files]
         intermap_files.append(os.path.join(request.cfg.data_dir, "intermap.txt"))
 
@@ -503,6 +532,9 @@
                 lines.extend(f.readlines())
                 f.close()
 
+        # add the contents of the InterWikiMap page
+        lines += Page(request, "InterWikiMap").get_raw_body().splitlines()
+
         for line in lines:
             if not line or line[0] == '#': continue
             try:
@@ -522,6 +554,7 @@
 
         # save for later
         request.cfg._interwiki_list = _interwiki_list
+        request.cfg._interwiki_ts = now
 
     return _interwiki_list
 
--- a/MoinMoin/xmlrpc/__init__.py	Sun Jul 30 22:11:38 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Sun Jul 30 22:24:16 2006 +0200
@@ -589,13 +589,13 @@
         if from_rev is None:
             oldcontents = lambda: ""
         else:
-            oldpage = Page(request, pagename, rev=from_rev)
+            oldpage = Page(self.request, pagename, rev=from_rev)
             oldcontents = lambda: oldpage.get_raw_body_str()
 
         if to_rev is None:
             newcontents = lambda: currentpage.get_raw_body()
         else:
-            newpage = Page(request, pagename, rev=to_rev)
+            newpage = Page(self.request, pagename, rev=to_rev)
             newcontents = lambda: newpage.get_raw_body_str()
             newrev = newpage.get_real_rev()
 
--- a/docs/CHANGES.aschremmer	Sun Jul 30 22:11:38 2006 +0200
+++ b/docs/CHANGES.aschremmer	Sun Jul 30 22:24:16 2006 +0200
@@ -2,7 +2,7 @@
 ===============================
 
   Known main issues:
-    * How will we store tags?
+    * How will we store tags? (Metadata support would be handy)
     * How to handle renames/deletes?
     * How to handle colliding/empty interwiki names?
 
@@ -12,7 +12,7 @@
       identity storage.
     * Clean up trailing whitespace.
     * Add page locking, i.e. use the one in the new storage layer.
-    * How about using unique IDs that just derive from the interwikiname?
+    * Check what needs to be documented on MoinMaster.
 
   New Features:
     * XMLRPC method to return the Moin version
@@ -27,11 +27,19 @@
     * XMLRPC method to get the pagelist in a special way (revnos,
       no system pages etc.)
     * IWID support - i.e. every instance has a unique ID
+    * InterWiki page editable in the wiki
 
   Bugfixes (only stuff that is buggy in moin/1.6 main branch):
     * Conflict resolution fixes. (merged into main)
-    * Python 2.5 compatibility fixes in the Page caching logic
-    * sre pickle issues in the wikidicts code
+    * Python 2.5 compatibility fixes in the Page caching logic (merged)
+    * sre pickle issues in the wikidicts code (merged)
+    * cgitb can hide particular names, this avoids information leaks
+      if the user files cannot be parsed for example
+    * Fixed User.__repr__ - it is insane to put the ID in there
+    * Worked around the FastCGI problem on Lighttpd: empty lines in the error log, thanks to Jay Soffian
+    * Fixed the MetaDict code to use locks.
+    * Fixed bug in request.py that avoided showing a traceback if there was a fault
+      after the first headers were sent.
 
   Other Changes:
     * Refactored conflict resolution and XMLRPC code.
@@ -68,11 +76,22 @@
          script written by Stefan Merten.
 Week 29: Finished first version of the mergeChanges method. Added Tag and TagStore classes which are currently
          using pickle-based storage. Added getAllPagesEx XMLRPC method.
-Week 30: Implemented IWID support, added function to generate random strings.
+Week 30: Implemented IWID support, added function to generate random strings. Added support
+         for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. Added handling of
+         various options and detection of anonymous wikis to the SyncPages action.
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-20: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-21: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-22: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-23: no work on SOC project -- a Sunday
+2006-07-24: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-25: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-26: student didn't work on project
+2006-07-27: student didn't work on project
+2006-07-28: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
+2006-07-29: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 
 Time plan
 =========