changeset 1212:84f94820d612

merge with main
author Franz Pletz <fpletz AT franz-pletz DOT org>
date Wed, 02 Aug 2006 17:07:09 +0200
parents d028d37e7105 (current diff) 72a208bfe579 (diff)
children 20908b1eccb2
files MoinMoin/config/multiconfig.py
diffstat 11 files changed, 309 insertions(+), 210 deletions(-)
--- a/MoinMoin/__init__.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/__init__.py	Wed Aug 02 17:07:09 2006 +0200
@@ -1,6 +1,6 @@
 # -*- coding: iso-8859-1 -*-
 """
-MoinMoin Version 1.6.0alpha 61142a50c41b+ tip
+MoinMoin Version 1.6.0alpha b27d720fbc8e tip
 
 @copyright: 2000-2006 by Jürgen Hermann <jh@web.de>
 @license: GNU GPL, see COPYING for details.
--- a/MoinMoin/action/AttachFile.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/action/AttachFile.py	Wed Aug 02 17:07:09 2006 +0200
@@ -29,7 +29,7 @@
 import os, time, zipfile
 from MoinMoin import config, user, util, wikiutil, packages
 from MoinMoin.Page import Page
-from MoinMoin.util import filesys
+from MoinMoin.util import filesys, timefuncs
 
 action_name = __name__.split('.')[-1]
 
@@ -651,18 +651,22 @@
     if not filename:
         return # error msg already sent in _access_file
 
-    mt = wikiutil.MimeType(filename=filename)
+    timestamp = timefuncs.formathttpdate(int(os.path.getmtime(fpath)))
+    if request.if_modified_since == timestamp:
+        request.emit_http_headers(["Status: 304 Not modified"])
+    else:
+        mt = wikiutil.MimeType(filename=filename)
+        request.emit_http_headers([
+            "Content-Type: %s" % mt.content_type(),
+            "Last-Modified: %s" % timestamp, # TODO maybe add a short Expires: header here?
+            "Content-Length: %d" % os.path.getsize(fpath),
+            # TODO: fix the encoding here, plain 8 bit is not allowed according to the RFCs
+            # There is no solution that is compatible to IE except stripping non-ascii chars
+            "Content-Disposition: attachment; filename=\"%s\"" % filename.encode(config.charset),
+        ])
 
-    request.emit_http_headers([
-        "Content-Type: %s" % mt.content_type(),
-        "Content-Length: %d" % os.path.getsize(fpath),
-        # TODO: fix the encoding here, plain 8 bit is not allowed according to the RFCs
-        # There is no solution that is compatible to IE except stripping non-ascii chars
-        "Content-Disposition: attachment; filename=\"%s\"" % filename.encode(config.charset),
-    ])
-
-    # send data
-    shutil.copyfileobj(open(fpath, 'rb'), request, 8192)
+        # send data
+        shutil.copyfileobj(open(fpath, 'rb'), request, 8192)
 
 def install_package(pagename, request):
     _ = request.getText
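
A minimal sketch of the conditional-GET pattern introduced above, assuming a
request object that exposes if_modified_since, emit_http_headers() and write()
(time.strftime stands in for MoinMoin.util.timefuncs.formathttpdate):

import os, time

def send_file(request, fpath):
    # format the file mtime the same way on every request, so the
    # comparison with the client's If-Modified-Since header is byte-exact
    mtime = int(os.path.getmtime(fpath))
    timestamp = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(mtime))
    if request.if_modified_since == timestamp:
        # client cache is current: status line only, no body
        request.emit_http_headers(["Status: 304 Not modified"])
    else:
        request.emit_http_headers([
            "Content-Length: %d" % os.path.getsize(fpath),
            "Last-Modified: %s" % timestamp,
        ])
        request.write(open(fpath, 'rb').read())
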
--- a/MoinMoin/action/SyncPages.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Wed Aug 02 17:07:09 2006 +0200
@@ -27,36 +27,81 @@
 from MoinMoin.Page import Page
 from MoinMoin.wikidicts import Dict, Group
 
+# directions
+UP, DOWN, BOTH = range(3)
+directions_map = {"up": UP, "down": DOWN, "both": BOTH}
 
 class ActionStatus(Exception): pass
 
 class UnsupportedWikiException(Exception): pass
 
 # Move these classes to MoinMoin.wikisync
-class RemotePage(object):
+class SyncPage(object):
     """ This class represents a page in (another) wiki. """
-    def __init__(self, name, revno):
+    def __init__(self, name, local_rev=None, remote_rev=None):
         self.name = name
-        self.revno = revno
+        self.local_rev = local_rev
+        self.remote_rev = remote_rev
+        assert local_rev or remote_rev
 
     def __repr__(self):
         return repr("<Remote Page %r>" % unicode(self))
 
     def __unicode__(self):
-        return u"%s<%i>" % (self.name, self.revno)
+        return u"%s<%r:%r>" % (self.name, self.local_rev, self.remote_rev)
 
     def __lt__(self, other):
         return self.name < other.name
 
+    def __hash__(self):
+        return hash(self.name)
+
     def __eq__(self, other):
-        if not isinstance(other, RemotePage):
+        if not isinstance(other, SyncPage):
             return False
         return self.name == other.name
 
-    def filter(cls, rp_list, regex):
-        return [x for x in rp_list if regex.match(x.name)]
+    def filter(cls, sp_list, func):
+        return [x for x in sp_list if func(x.name)]
     filter = classmethod(filter)
 
+    def merge(cls, local_list, remote_list):
+        # map page names to SyncPage objects :-)
+        d = dict(zip(local_list, local_list))
+        for sp in remote_list:
+            if sp in d:
+                d[sp].remote_rev = sp.remote_rev
+            else:
+                d[sp] = sp
+        return d.keys()
+    merge = classmethod(merge)
+
+    def is_only_local(self):
+        return not self.remote_rev
+
+    def is_only_remote(self):
+        return not self.local_rev
+
+    def is_local_and_remote(self):
+        return self.local_rev and self.remote_rev
+
+    def iter_local_only(cls, sp_list):
+        for x in sp_list:
+            if x.is_only_local():
+                yield x
+    iter_local_only = classmethod(iter_local_only)
+
+    def iter_remote_only(cls, sp_list):
+        for x in sp_list:
+            if x.is_only_remote():
+                yield x
+    iter_remote_only = classmethod(iter_remote_only)
+
+    def iter_local_and_remote(cls, sp_list):
+        for x in sp_list:
+            if x.is_local_and_remote():
+                yield x
+    iter_local_and_remote = classmethod(iter_local_and_remote)
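
Because SyncPage hashes and compares by page name alone, merging the local and
remote listings reduces to dict operations keyed by name. A small illustration
of the merge semantics (page names and revision numbers are made up):

from MoinMoin.action.SyncPages import SyncPage

local = [SyncPage(u"FrontPage", local_rev=3), SyncPage(u"LocalOnly", local_rev=1)]
remote = [SyncPage(u"FrontPage", remote_rev=7), SyncPage(u"RemoteOnly", remote_rev=2)]

for sp in sorted(SyncPage.merge(local, remote)):
    print unicode(sp)
# prints: FrontPage<3:7>, LocalOnly<1:None>, RemoteOnly<None:2>
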
 
 class RemoteWiki(object):
     """ This class should be the base for all implementations of remote wiki
@@ -71,7 +116,7 @@
         return NotImplemented
 
     def getPages(self):
-        """ Returns a list of RemotePage instances. """
+        """ Returns a list of SyncPage instances. """
         return NotImplemented
 
 
@@ -97,7 +142,7 @@
         if not self.is_anonymous and interwikiname != remote_interwikiname:
             raise UnsupportedWikiException(_("The remote wiki uses a different InterWiki name (%(remotename)s)"
                                              " internally than you specified (%(localname)s).") % {
-                "remotename": remote_interwikiname, "localname": interwikiname})
+                "remotename": wikiutil.escape(remote_interwikiname), "localname": wikiutil.escape(interwikiname)})
 
         if self.is_anonymous:
             self.iwid_full = packLine([remote_iwid])
@@ -113,7 +158,7 @@
 
     def getPages(self):
         pages = self.connection.getAllPagesEx({"include_revno": True, "include_deleted": True})
-        return [RemotePage(unicode(name), revno) for name, revno in pages]
+        return [SyncPage(unicode(name), remote_rev=revno) for name, revno in pages]
 
     def __repr__(self):
         return "<MoinRemoteWiki wiki_url=%r valid=%r>" % (self.wiki_url, self.valid)
@@ -128,17 +173,17 @@
         pages = []
         for group_pagename in group_list:
             pages.extend(Group(self.request, group_pagename).members())
-        return [self.createRemotePage(x) for x in pages]
+        return [self.createSyncPage(x) for x in pages]
 
-    def createRemotePage(self, page_name):
-        return RemotePage(page_name, Page(self.request, page_name).get_real_rev())
+    def createSyncPage(self, page_name):
+        return SyncPage(page_name, local_rev=Page(self.request, page_name).get_real_rev())
 
     # Methods implementing the RemoteWiki interface
     def getInterwikiName(self):
         return self.request.cfg.interwikiname
 
     def getPages(self):
-        return [self.createRemotePage(x) for x in self.request.rootpage.getPageList(exists=0)]
+        return [self.createSyncPage(x) for x in self.request.rootpage.getPageList(exists=0)]
 
     def __repr__(self):
         return "<MoinLocalWiki>"
@@ -155,10 +200,10 @@
             "remotePrefix": "",
             "localPrefix": "",
             "remoteWiki": "",
-            "localMatch": None,
-            "remoteMatch": None,
+            "pageMatch": None,
             "pageList": None,
             "groupList": None,
+            "direction": "foo", # is defaulted below
         }
 
         options.update(Dict(self.request, self.pagename).get_dict())
@@ -169,21 +214,25 @@
         if options["groupList"] is not None:
             options["groupList"] = unpackLine(options["groupList"], ",")
 
+        options["direction"] = directions_map.get(options["direction"], BOTH)
+
         return options
 
     def fix_params(self, params):
         """ Does some fixup on the parameters. """
 
-        # merge the pageList case into the remoteMatch case
+        # merge the pageList case into the pageMatch case
         if params["pageList"] is not None:
-            params["localMatch"] = params["remoteMatch"] = u'|'.join([r'^%s$' % re.escape(name)
-                                                                      for name in params["pageList"]])
+            params["pageMatch"] = u'|'.join([r'^%s$' % re.escape(name)
+                                             for name in params["pageList"]])
+            del params["pageList"]
 
-        if params["localMatch"] is not None:
-            params["localMatch"] = re.compile(params["localMatch"], re.U)
-        
-        if params["remoteMatch"] is not None:
-            params["remoteMatch"] = re.compile(params["remoteMatch"], re.U)
+        if params["pageMatch"] is not None:
+            params["pageMatch"] = re.compile(params["pageMatch"], re.U)
+
+        # we do not support matching or listing pages if there is a group of pages
+        if params["groupList"]:
+            params["pageMatch"] = None
 
         return params
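
The pageList-to-pageMatch conversion above builds a single alternation of
anchored, escaped page names. The same transformation in isolation:

import re

page_list = [u"FrontPage", u"Foo/Bar (test)"]
pattern = u'|'.join([r'^%s$' % re.escape(name) for name in page_list])
page_match = re.compile(pattern, re.U)

print bool(page_match.match(u"Foo/Bar (test)"))  # True: escaping protects the parens
print bool(page_match.match(u"FrontPage2"))      # False: ^...$ anchors each alternative
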
 
@@ -196,7 +245,6 @@
 
         params = self.fix_params(self.parse_page())
 
-
         try:
             if not self.request.cfg.interwikiname:
                 raise ActionStatus(_("Please set an interwikiname in your wikiconfig (see HelpOnConfiguration) to be able to use this action."))
@@ -224,32 +272,32 @@
         
         r_pages = remote.getPages()
         l_pages = local.getPages()
-        print "Got %i local, %i remote pages" % (len(l_pages), len(r_pages))
-        if params["localMatch"]:
-            l_pages = RemotePage.filter(l_pages, params["localMatch"])
-        if params["remoteMatch"]:
-            print "Filtering remote pages using regex %r" % params["remoteMatch"].pattern
-            r_pages = RemotePage.filter(r_pages, params["remoteMatch"])
-        print "After filtering: Got %i local, %i remote pages" % (len(l_pages), len(r_pages))
 
         if params["groupList"]:
-            pages_from_groupList = local.getGroupItems(params["groupList"])
-            if not params["localMatch"]:
-                l_pages = pages_from_groupList
-            else:
-                l_pages += pages_from_groupList
+            pages_from_groupList = set(local.getGroupItems(params["groupList"]))
+            r_pages = SyncPage.filter(r_pages, pages_from_groupList.__contains__)
+            l_pages = SyncPage.filter(l_pages, pages_from_groupList.__contains__)
 
-        l_pages = set(l_pages)
-        r_pages = set(r_pages)
+        m_pages = SyncPage.merge(l_pages, r_pages)
+
+        print "Got %i local, %i remote pages, %i merged pages" % (len(l_pages), len(r_pages), len(m_pages))
         
-        # XXX this is not correct if matching is active
-        remote_but_not_local = r_pages - l_pages
-        local_but_not_remote = l_pages - r_pages
+        if params["pageMatch"]:
+            m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
+        print "After filtering: Got %i merges pages" % (len(m_pages), )
+
+        on_both_sides = list(SyncPage.iter_local_and_remote(m_pages))
+        remote_but_not_local = list(SyncPage.iter_remote_only(m_pages))
+        local_but_not_remote = list(SyncPage.iter_local_only(m_pages))
         
         # some initial test code
         r_new_pages = u", ".join([unicode(x) for x in remote_but_not_local])
         l_new_pages = u", ".join([unicode(x) for x in local_but_not_remote])
-        raise ActionStatus("These pages are in the remote wiki, but not local: " + r_new_pages + "<br>These pages are in the local wiki, but not in the remote one: " + l_new_pages)
+        raise ActionStatus("These pages are in the remote wiki, but not local: " + wikiutil.escape(r_new_pages) + "<br>These pages are in the local wiki, but not in the remote one: " + wikiutil.escape(l_new_pages))
+        #if params["direction"] in (DOWN, BOTH):
+        #    for rp in remote_but_not_local:
+                # XXX add locking, acquire read-lock on rp
+                
 
 
 def execute(pagename, request):
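
The new direction option is parsed leniently: directions_map.get() maps any
unknown value, including the "foo" placeholder default, to BOTH. A short
sketch of the lookup and of how the commented-out skeleton above would use it:

UP, DOWN, BOTH = range(3)
directions_map = {"up": UP, "down": DOWN, "both": BOTH}

for raw in ("up", "down", "both", "foo", None):
    print raw, "->", directions_map.get(raw, BOTH)
# unknown or missing values silently fall back to BOTH

# the intended use, per the skeleton above (hypothetical):
# if direction in (DOWN, BOTH): copy remote_but_not_local pages here
# if direction in (UP, BOTH): copy local_but_not_remote pages to the remote side
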
--- a/MoinMoin/action/rss_rc.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/action/rss_rc.py	Wed Aug 02 17:07:09 2006 +0200
@@ -45,18 +45,7 @@
     except ValueError:
         ddiffs = 0
 
-    # prepare output
-    out = StringIO.StringIO()
-    handler = RssGenerator(out)
-
     # get data
-    interwiki = request.getBaseURL()
-    if interwiki[-1] != "/": interwiki = interwiki + "/"
-
-    logo = re.search(r'src="([^"]*)"', cfg.logo_string)
-    if logo:
-        logo = request.getQualifiedURL(logo.group(1))
-
     log = editlog.EditLog(request)
     logdata = []
     counter = 0
@@ -81,145 +70,173 @@
             break
     del log
 
-    # start SAX stream
-    handler.startDocument()
-    handler._out.write(
-        '<!--\n'
-        '    Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
-        '    You cannot get more than %d items though.\n'
-        '    \n'
-        '    Add "unique=1" to get a list of changes where page names are unique,\n'
-        '    i.e. where only the latest change of each page is reflected.\n'
-        '    \n'
-        '    Add "diffs=1" to add change diffs to the description of each items.\n'
-        '    \n'
-        '    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
-        '    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i'
-        '-->\n' % (items_limit, max_items, unique, diffs, ddiffs)
-        )
+    timestamp = timefuncs.formathttpdate(lastmod)
+    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)
 
-    # emit channel description
-    handler.startNode('channel', {
-        (handler.xmlns['rdf'], 'about'): request.getBaseURL(),
-        })
-    handler.simpleNode('title', cfg.sitename)
-    handler.simpleNode('link', interwiki + wikiutil.quoteWikinameURL(pagename))
-    handler.simpleNode('description', 'RecentChanges at %s' % cfg.sitename)
-    if logo:
-        handler.simpleNode('image', None, {
-            (handler.xmlns['rdf'], 'resource'): logo,
-            })
-    if cfg.interwikiname:
-        handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)
+    # for 304, we look at the If-Modified-Since and If-None-Match headers:
+    # every conditional header the client sent must match, and at least
+    # one matching header must be present.
+    if (request.if_modified_since == timestamp
+        and (not request.if_none_match or request.if_none_match == etag)):
+        request.emit_http_headers(["Status: 304 Not modified"])
+    elif (request.if_none_match == etag
+          and not request.if_modified_since):
+        request.emit_http_headers(["Status: 304 Not modified"])
+    else:
+        # generate an Expires header, using whatever setting the admin
+        # defined for suggested cache lifetime of the RecentChanges RSS doc
+        expires = timefuncs.formathttpdate(time.time() + cfg.rss_cache)
 
-    handler.startNode('items')
-    handler.startNode(('rdf', 'Seq'))
-    for item in logdata:
-        link = "%s%s#%04d%02d%02d%02d%02d%02d" % ((interwiki,
-                wikiutil.quoteWikinameURL(item.pagename),) + item.time[:6])
-        handler.simpleNode(('rdf', 'li'), None, attr={
-            (handler.xmlns['rdf'], 'resource'): link,
-        })
-    handler.endNode(('rdf', 'Seq'))
-    handler.endNode('items')
-    handler.endNode('channel')
+        httpheaders = ["Content-Type: text/xml; charset=%s" % config.charset,
+                       "Expires: %s" % expires,
+                       "Last-Modified: %s" % timestamp,
+                       "Etag: %s" % etag, ]
 
-    # emit logo data
-    if logo:
-        handler.startNode('image', attr={
-            (handler.xmlns['rdf'], 'about'): logo,
+        # send the generated XML document
+        request.emit_http_headers(httpheaders)
+
+        interwiki = request.getBaseURL()
+        if interwiki[-1] != "/":
+            interwiki = interwiki + "/"
+
+        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
+        if logo:
+            logo = request.getQualifiedURL(logo.group(1))
+
+        # prepare output
+        out = StringIO.StringIO()
+        handler = RssGenerator(out)
+
+        # start SAX stream
+        handler.startDocument()
+        handler._out.write(
+            '<!--\n'
+            '    Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
+            '    You cannot get more than %d items though.\n'
+            '    \n'
+            '    Add "unique=1" to get a list of changes where page names are unique,\n'
+            '    i.e. where only the latest change of each page is reflected.\n'
+            '    \n'
+            '    Add "diffs=1" to add change diffs to the description of each items.\n'
+            '    \n'
+            '    Add "ddiffs=1" to link directly to the diff (good for FeedReader).\n'
+            '    Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i'
+            '-->\n' % (items_limit, max_items, unique, diffs, ddiffs)
+            )
+
+        # emit channel description
+        handler.startNode('channel', {
+            (handler.xmlns['rdf'], 'about'): request.getBaseURL(),
             })
         handler.simpleNode('title', cfg.sitename)
-        handler.simpleNode('link', interwiki)
-        handler.simpleNode('url', logo)
-        handler.endNode('image')
-
-    # emit items
-    for item in logdata:
-        page = Page(request, item.pagename)
-        link = interwiki + wikiutil.quoteWikinameURL(item.pagename)
-        rdflink = "%s#%04d%02d%02d%02d%02d%02d" % ((link,) + item.time[:6])
-        handler.startNode('item', attr={
-            (handler.xmlns['rdf'], 'about'): rdflink,
-        })
-
-        # general attributes
-        handler.simpleNode('title', item.pagename)
-        if ddiffs:
-            handler.simpleNode('link', link+"?action=diff")
-        else:
-            handler.simpleNode('link', link)
-
-        handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))
-
-        # description
-        desc_text = item.comment
-        if diffs:
-            # TODO: rewrite / extend wikiutil.pagediff
-            # searching for the matching pages doesn't really belong here
-            revisions = page.getRevList()
-
-            rl = len(revisions)
-            for idx in range(rl):
-                rev = revisions[idx]
-                if rev <= item.rev:
-                    if idx+1 < rl:
-                        lines = wikiutil.pagediff(request, item.pagename, revisions[idx+1], item.pagename, 0, ignorews=1)
-                        if len(lines) > 20:
-                            lines = lines[:20] + ['...\n']
-                        lines = '\n'.join(lines)
-                        lines = wikiutil.escape(lines)
-                        desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text, lines)
-                    break
-        if desc_text:
-            handler.simpleNode('description', desc_text)
+        handler.simpleNode('link', interwiki + wikiutil.quoteWikinameURL(pagename))
+        handler.simpleNode('description', 'RecentChanges at %s' % cfg.sitename)
+        if logo:
+            handler.simpleNode('image', None, {
+                (handler.xmlns['rdf'], 'resource'): logo,
+                })
+        if cfg.interwikiname:
+            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)
 
-        # contributor
-        edattr = {}
-        if cfg.show_hosts:
-            edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
-        if item.editor[0] == 'interwiki':
-            edname = "%s:%s" % item.editor[1]
-            ##edattr[(None, 'link')] = interwiki + wikiutil.quoteWikiname(edname)
-        else: # 'ip'
-            edname = item.editor[1]
-            ##edattr[(None, 'link')] = link + "?action=info"
-
-        # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
-        # if you know how to do this right, please send us a patch
-
-        handler.startNode(('dc', 'contributor'))
-        handler.startNode(('rdf', 'Description'), attr=edattr)
-        handler.simpleNode(('rdf', 'value'), edname)
-        handler.endNode(('rdf', 'Description'))
-        handler.endNode(('dc', 'contributor'))
+        handler.startNode('items')
+        handler.startNode(('rdf', 'Seq'))
+        for item in logdata:
+            link = "%s%s#%04d%02d%02d%02d%02d%02d" % ((interwiki,
+                    wikiutil.quoteWikinameURL(item.pagename),) + item.time[:6])
+            handler.simpleNode(('rdf', 'li'), None, attr={
+                (handler.xmlns['rdf'], 'resource'): link,
+            })
+        handler.endNode(('rdf', 'Seq'))
+        handler.endNode('items')
+        handler.endNode('channel')
 
-        # wiki extensions
-        handler.simpleNode(('wiki', 'version'), "%i" % (item.ed_time_usecs))
-        handler.simpleNode(('wiki', 'status'), ('deleted', 'updated')[page.exists()])
-        handler.simpleNode(('wiki', 'diff'), link + "?action=diff")
-        handler.simpleNode(('wiki', 'history'), link + "?action=info")
-        # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor ) 
-        # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA ) 
-
-        handler.endNode('item')
-
-    # end SAX stream
-    handler.endDocument()
+        # emit logo data
+        if logo:
+            handler.startNode('image', attr={
+                (handler.xmlns['rdf'], 'about'): logo,
+                })
+            handler.simpleNode('title', cfg.sitename)
+            handler.simpleNode('link', interwiki)
+            handler.simpleNode('url', logo)
+            handler.endNode('image')
 
-    # generate an Expires header, using whatever setting the admin
-    # defined for suggested cache lifetime of the RecentChanges RSS doc
-    expires = timefuncs.formathttpdate(time.time() + cfg.rss_cache)
-
-    httpheaders = ["Content-Type: text/xml; charset=%s" % config.charset,
-                   "Expires: %s" % expires]
+        # emit items
+        for item in logdata:
+            page = Page(request, item.pagename)
+            link = interwiki + wikiutil.quoteWikinameURL(item.pagename)
+            rdflink = "%s#%04d%02d%02d%02d%02d%02d" % ((link,) + item.time[:6])
+            handler.startNode('item', attr={
+                (handler.xmlns['rdf'], 'about'): rdflink,
+            })
 
-    # use a correct Last-Modified header, set to whatever the mod date
-    # on the most recent page was; if there were no mods, don't send one
-    if lastmod:
-        httpheaders.append("Last-Modified: %s" % timefuncs.formathttpdate(lastmod))
+            # general attributes
+            handler.simpleNode('title', item.pagename)
+            if ddiffs:
+                handler.simpleNode('link', link+"?action=diff")
+            else:
+                handler.simpleNode('link', link)
 
-    # send the generated XML document
-    request.emit_http_headers(httpheaders)
-    request.write(out.getvalue())
+            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))
 
+            # description
+            desc_text = item.comment
+            if diffs:
+                # TODO: rewrite / extend wikiutil.pagediff
+                # searching for the matching pages doesn't really belong here
+                revisions = page.getRevList()
+
+                rl = len(revisions)
+                for idx in range(rl):
+                    rev = revisions[idx]
+                    if rev <= item.rev:
+                        if idx+1 < rl:
+                            lines = wikiutil.pagediff(request, item.pagename, revisions[idx+1], item.pagename, 0, ignorews=1)
+                            if len(lines) > 20:
+                                lines = lines[:20] + ['...\n']
+                            lines = '\n'.join(lines)
+                            lines = wikiutil.escape(lines)
+                            desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text, lines)
+                        break
+            if desc_text:
+                handler.simpleNode('description', desc_text)
+
+            # contributor
+            edattr = {}
+            if cfg.show_hosts:
+                edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
+            if item.editor[0] == 'interwiki':
+                edname = "%s:%s" % item.editor[1]
+                ##edattr[(None, 'link')] = interwiki + wikiutil.quoteWikiname(edname)
+            else: # 'ip'
+                edname = item.editor[1]
+                ##edattr[(None, 'link')] = link + "?action=info"
+
+            # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
+            # if you know how to do this right, please send us a patch
+
+            handler.startNode(('dc', 'contributor'))
+            handler.startNode(('rdf', 'Description'), attr=edattr)
+            handler.simpleNode(('rdf', 'value'), edname)
+            handler.endNode(('rdf', 'Description'))
+            handler.endNode(('dc', 'contributor'))
+
+            # wiki extensions
+            handler.simpleNode(('wiki', 'version'), "%i" % (item.ed_time_usecs))
+            handler.simpleNode(('wiki', 'status'), ('deleted', 'updated')[page.exists()])
+            handler.simpleNode(('wiki', 'diff'), link + "?action=diff")
+            handler.simpleNode(('wiki', 'history'), link + "?action=info")
+            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor ) 
+            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA ) 
+
+            handler.endNode('item')
+
+        # end SAX stream
+        handler.endDocument()
+
+        request.write(out.getvalue())
+
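
The rewritten rss_rc derives both cache validators from the same data:
Last-Modified from the newest edit-log entry, and an ETag that also encodes
the request parameters, since different items/diffs/ddiffs/unique settings
produce different documents. A compact sketch of the matching rule (the header
values below are made up):

def not_modified(if_modified_since, if_none_match, timestamp, etag):
    # every conditional header the client sent must match ...
    if if_modified_since and if_modified_since != timestamp:
        return False
    if if_none_match and if_none_match != etag:
        return False
    # ... and at least one matching header must be present at all
    return bool(if_modified_since or if_none_match)

print not_modified(None, "1154531229-15-0-0-0",
                   "Wed, 02 Aug 2006 15:07:09 GMT", "1154531229-15-0-0-0")  # True
print not_modified("Wed, 02 Aug 2006 15:07:09 GMT", "stale",
                   "Wed, 02 Aug 2006 15:07:09 GMT", "1154531229-15-0-0-0")  # False
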
--- a/MoinMoin/config/multiconfig.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/config/multiconfig.py	Wed Aug 02 17:07:09 2006 +0200
@@ -553,17 +553,33 @@
 
         # check if mail is possible and set flag:
         self.mail_enabled = (self.mail_smarthost is not None or self.mail_sendmail is not None) and self.mail_from
-        
-        self.meta_dict = wikiutil.MetaDict(os.path.join(data_dir, 'meta'), self.cache_dir)
 
-        # interwiki ID processing
-        self.load_IWID()
+        # Cache variables for the properties below
+        self._iwid = self._iwid_full = self._meta_dict = None
+
+    def load_meta_dict(self):
+        """ The meta_dict contains meta data about the wiki instance. """
+        if getattr(self, "_meta_dict", None) is None:
+            self._meta_dict = wikiutil.MetaDict(os.path.join(self.data_dir, 'meta'), self.cache_dir)
+        return self._meta_dict
+    meta_dict = property(load_meta_dict)
+
+    # lazily load iwid(_full)
+    def make_iwid_property(attr):
+        def getter(self):
+            if getattr(self, attr, None) is None:
+                self.load_IWID()
+            return getattr(self, attr)
+        return property(getter)
+    iwid = make_iwid_property("_iwid")
+    iwid_full = make_iwid_property("_iwid_full")
 
     def load_IWID(self):
         """ Loads the InterWikiID of this instance. It is used to identify the instance
             globally.
             The IWID is available as cfg.iwid
             The full IWID containing the interwiki name is available as cfg.iwid_full
+            This method is called by the property.
         """
 
         try:
@@ -573,11 +589,11 @@
             self.meta_dict['IWID'] = iwid
             self.meta_dict.sync()
 
-        self.iwid = iwid
+        self._iwid = iwid
         if self.interwikiname is not None:
-            self.iwid_full = packLine([iwid, self.interwikiname])
+            self._iwid_full = packLine([iwid, self.interwikiname])
         else:
-            self.iwid_full = packLine([iwid])
+            self._iwid_full = packLine([iwid])
 
     def _config_check(self):
         """ Check namespace and warn about unknown names
--- a/MoinMoin/request/CLI.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/request/CLI.py	Wed Aug 02 17:07:09 2006 +0200
@@ -26,6 +26,8 @@
         self.http_host = 'localhost'
         self.http_referer = ''
         self.script_name = '.'
+        self.if_modified_since = None
+        self.if_none_match = None
         RequestBase.__init__(self, properties)
         self.cfg.caching_formats = [] # don't spoil the cache
         self.initTheme() # usually request.run() does this, but we don't use it
--- a/MoinMoin/request/STANDALONE.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/request/STANDALONE.py	Wed Aug 02 17:07:09 2006 +0200
@@ -33,6 +33,10 @@
             self.http_user_agent = sa.headers.getheader('user-agent', '')
             co = filter(None, sa.headers.getheaders('cookie'))
             self.saved_cookie = ', '.join(co) or ''
+            self.if_modified_since = (sa.headers.getheader('if-modified-since')
+                                      or self.if_modified_since)
+            self.if_none_match = (sa.headers.getheader('if-none-match')
+                                  or self.if_none_match)
 
             # Copy rest from standalone request   
             self.server_name = sa.server.server_name
--- a/MoinMoin/request/TWISTED.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/request/TWISTED.py	Wed Aug 02 17:07:09 2006 +0200
@@ -23,6 +23,8 @@
             self.http_accept_language = self.twistd.getHeader('Accept-Language')
             self.saved_cookie = self.twistd.getHeader('Cookie')
             self.http_user_agent = self.twistd.getHeader('User-Agent')
+            self.if_modified_since = self.twistd.getHeader('If-Modified-Since')
+            self.if_none_match = self.twistd.getHeader('If-None-Match')
 
             # Copy values from twisted request
             self.server_protocol = self.twistd.clientproto
--- a/MoinMoin/request/__init__.py	Wed Aug 02 17:06:24 2006 +0200
+++ b/MoinMoin/request/__init__.py	Wed Aug 02 17:07:09 2006 +0200
@@ -315,8 +315,7 @@
         """
         # Values we can just copy
         self.env = env
-        self.http_accept_language = env.get('HTTP_ACCEPT_LANGUAGE',
-                                            self.http_accept_language)
+        self.http_accept_language = env.get('HTTP_ACCEPT_LANGUAGE', self.http_accept_language)
         self.server_name = env.get('SERVER_NAME', self.server_name)
         self.server_port = env.get('SERVER_PORT', self.server_port)
         self.saved_cookie = env.get('HTTP_COOKIE', '')
@@ -326,6 +325,8 @@
         self.request_method = env.get('REQUEST_METHOD', None)
         self.remote_addr = env.get('REMOTE_ADDR', '')
         self.http_user_agent = env.get('HTTP_USER_AGENT', '')
+        self.if_modified_since = env.get('If-modified-since') or env.get(cgiMetaVariable('If-modified-since'))
+        self.if_none_match = env.get('If-none-match') or env.get(cgiMetaVariable('If-none-match'))
 
         # REQUEST_URI is not part of CGI spec, but an addition of Apache.
         self.request_uri = env.get('REQUEST_URI', '')
@@ -336,8 +337,7 @@
         self.setHost(env.get('HTTP_HOST'))
         self.fixURI(env)
         self.setURL(env)
-
-        ##self.debugEnvironment(env)
+        #self.debugEnvironment(env)
 
     def setHttpReferer(self, referer):
         """ Set http_referer, making sure its ascii
@@ -1361,7 +1361,7 @@
             environment.append('  %s = %r\n' % (key, env[key]))
         environment = ''.join(environment)
 
-        data = '\nRequest Attributes\n%s\nEnviroment\n%s' % (attributes, environment)
+        data = '\nRequest Attributes\n%s\nEnvironment\n%s' % (attributes, environment)
         f = open('/tmp/env.log', 'a')
         try:
             f.write(data)
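
setup_args now copies the conditional headers out of the environment, trying
the raw header name first and then its CGI meta-variable form. A hedged sketch,
assuming cgiMetaVariable follows the usual RFC 3875 naming convention (the
helper below is a hypothetical stand-in):

def cgi_meta_variable(header, scheme='http'):
    # 'If-modified-since' -> 'HTTP_IF_MODIFIED_SINCE'
    return '%s_%s' % (scheme.upper(), header.upper().replace('-', '_'))

env = {'HTTP_IF_NONE_MATCH': '1154531229-15-0-0-0'}
if_none_match = env.get('If-none-match') or env.get(cgi_meta_variable('If-none-match'))
print if_none_match  # falls back to the CGI form when the raw name is absent
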
--- a/docs/CHANGES	Wed Aug 02 17:06:24 2006 +0200
+++ b/docs/CHANGES	Wed Aug 02 17:07:09 2006 +0200
@@ -184,6 +184,8 @@
       and caches the results (farm wide cache/i18n/*).
     * added the diff parser from ParserMarket, thanks to Emilio Lopes, Fabien
       Ninoles and Jürgen Hermann.
+    * Added support for "304 Not Modified" responses for the AttachFile get
+      and rss_rc actions - faster, less traffic, less load.
 
   Bugfixes:
     * on action "info" page, "revert" link will not be displayed for empty page
@@ -205,6 +207,7 @@
     * BadContent and LocalBadContent now get noindex,nofollow robots header,
       same as POSTs.
     * Fixed handling of anchors in wiki links for the Restructured text parser.
+    * Fixed HTTP header output.
 
   Other changes:
     * we use (again) the same browser compatibility check as FCKeditor uses
--- a/docs/CHANGES.aschremmer	Wed Aug 02 17:06:24 2006 +0200
+++ b/docs/CHANGES.aschremmer	Wed Aug 02 17:07:09 2006 +0200
@@ -79,6 +79,9 @@
 Week 30: Implemented IWID support, added function to generate random strings. Added support
          for editing the InterWikiMap in the wiki. Added locking to the PickleTagStore and the MetaDict classes. Added handling of
          various options and detection of anonymous wikis to the SyncPages action.
+Week 31: Load the IWID and the meta dict lazily. Reworked RemotePage/SyncPage,
+         fixed option handling again, refined semantics of options, introduced
+         direction option, replaced "localMatch"/"remoteMatch" by "pageMatch".
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress