changeset 1434:7bfc51951aa5

merge with xapian branch
author Franz Pletz <fpletz AT franz-pletz DOT org>
date Mon, 21 Aug 2006 02:31:07 +0200
parents 6b0ea72d7665 (current diff) cc9805777571 (diff)
children 17e40b5bf1df
files MoinMoin/action/fullsearch.py MoinMoin/search/Xapian.py
diffstat 33 files changed, 424 insertions(+), 341 deletions(-)
--- a/MoinMoin/Page.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/Page.py	Mon Aug 21 02:31:07 2006 +0200
@@ -2,7 +2,7 @@
 """
     MoinMoin - Page class
 
-    @copyright: 2000-2004 by Jrgen Hermann <jh@web.de>
+    @copyright: 2000-2004 by Jürgen Hermann <jh@web.de>
     @license: GNU GPL, see COPYING for details.
 """
 
@@ -845,7 +845,7 @@
         self._raw_body = body
         self._raw_body_modified = modified
 
-    def url(self, request, querystr=None, escape=1):
+    def url(self, request, querystr=None, escape=1, anchor=None, relative=True):
         """ Return complete URL for this page, including scriptname
 
         @param request: the request object
@@ -853,13 +853,18 @@
             (str or dict, see wikiutil.makeQueryString)
         @param escape: escape url for html, to be backward compatible
             with old code (bool)
+        @param anchor: if specified, make a link to this anchor
         @rtype: str
         @return: complete url of this page, including scriptname
         """
-        url = '%s/%s' % (request.getScriptname(),
-                     wikiutil.quoteWikinameURL(self.page_name))
+        # Create url, excluding scriptname
+        url = wikiutil.quoteWikinameURL(self.page_name)
+        if querystr:
+            if isinstance(querystr, dict):
+                action = querystr.get('action', None)
+            else:
+                action = None # XXX we don't support getting the action out of a str
 
-        if querystr:
             querystr = wikiutil.makeQueryString(querystr)
 
             # TODO: remove in 2.0
@@ -867,15 +872,32 @@
             # New code should call with escape=0 to prevent the warning.
             if escape:
                 import warnings
-                warnings.warn("In moin 2.0 query string in url will not be"
-                              " escaped. See"
-                              " http://moinmoin.wikiwikiweb.de/ApiChanges")
+                warnings.warn("In moin 2.0 query string in url will not be escaped. "
+                              "See http://moinmoin.wikiwikiweb.de/ApiChanges. "
+                              "%s" % querystr)
                 querystr = wikiutil.escape(querystr)
 
+            # make action URLs denyable by robots.txt:
+            if action is not None and request.cfg.url_prefix_action is not None:
+                url = "%s/%s/%s" % (request.cfg.url_prefix_action, action, url)
             url = '%s?%s' % (url, querystr)
 
+        # Add anchor
+        if anchor:
+            url = "%s#%s" % (url, wikiutil.url_quote_plus(anchor))
+
+        if not relative:
+            url = '%s/%s' % (request.getScriptname(), url)
         return url
 
+    def link_to_raw(self, request, text, querystr=None, anchor=None, **kw):
+        """ core functionality of link_to, without the magic """
+        url = self.url(request, querystr, escape=0, anchor=anchor)
+        # escaping is done by link_tag -> formatter.url -> ._open()
+        link = wikiutil.link_tag(request, url, text,
+                                 formatter=getattr(self, 'formatter', None), **kw)
+        return link
+
     def link_to(self, request, text=None, querystr=None, anchor=None, **kw):
         """ Return HTML markup that links to this page.
 
@@ -893,27 +915,14 @@
         """
         if not text:
             text = self.split_title(request)
-
-        # Create url, excluding scriptname
-        url = wikiutil.quoteWikinameURL(self.page_name)
-        if querystr:
-            if not isinstance(querystr, type({})):
-                # makeQueryString does not escape strings any more
-                querystr = wikiutil.escape(querystr)
-                
-            querystr = wikiutil.makeQueryString(querystr)
-            url = "%s?%s" % (url, querystr)
-
-        # Add anchor
-        if anchor:
-            url = "%s#%s" % (url, wikiutil.url_quote_plus(anchor))
+        text = wikiutil.escape(text)
 
         # Add css class for non existing page
         if not self.exists():
             kw['css_class'] = 'nonexistent'
 
-        link = wikiutil.link_tag(request, url, wikiutil.escape(text),
-                                 formatter=getattr(self, 'formatter', None), **kw)
+        link = self.link_to_raw(request, text, querystr, anchor, **kw)
+
         # Create a link to attachments if any exist
         if kw.get('attachment_indicator', 0):
             from MoinMoin.action import AttachFile
@@ -1200,13 +1209,6 @@
 
             # send the page header
             if self.default_formatter:
-                full_text_query = 'linkto:"%s"' % self.page_name
-                link = '%s/%s?action=fullsearch&amp;value=%s&amp;context=180' % (
-                    request.getScriptname(),
-                    wikiutil.quoteWikinameURL(self.page_name),
-                    wikiutil.url_quote_plus(full_text_query))
-
-                title = self.split_title(request)
                 if self.rev:
                     msg = "<strong>%s</strong><br>%s" % (
                         _('Revision %(rev)d as of %(date)s') % {
@@ -1233,7 +1235,9 @@
                     request.user.addTrail(self.page_name)
                     trail = request.user.getTrail()
 
-                request.theme.send_title(title,  page=self, link=link, msg=msg,
+                title = self.split_title(request)
+
+                request.theme.send_title(title,  page=self, msg=msg,
                                     pagename=self.page_name, print_mode=print_mode,
                                     media=media, pi_refresh=pi_refresh,
                                     allow_doubleclick=1, trail=trail,
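
The hunks above extend Page.url() with anchor and relative parameters, route action links through cfg.url_prefix_action, and split the unescaped link markup out into link_to_raw(). A minimal usage sketch of the new API, assuming a request object and a page named FrontPage (both placeholders):

    from MoinMoin.Page import Page

    page = Page(request, u'FrontPage')
    # relative URL with an action query and an anchor; the action segment is
    # prefixed with cfg.url_prefix_action so robots.txt can exclude it
    url = page.url(request, querystr={'action': 'info'}, escape=0, anchor='history')
    # absolute URL including the scriptname, e.g. for an HTTP redirect
    request.http_redirect(page.url(request, escape=0, relative=False))
    # raw link markup; HTML escaping is left to link_tag/formatter.url
    link = page.link_to_raw(request, 'page info', querystr={'action': 'info'})
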
--- a/MoinMoin/PageEditor.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/PageEditor.py	Mon Aug 21 02:31:07 2006 +0200
@@ -2,7 +2,7 @@
 """
     MoinMoin - PageEditor class
 
-    @copyright: 2000-2004 by Jrgen Hermann <jh@web.de>
+    @copyright: 2000-2004 by Jürgen Hermann <jh@web.de>
     @license: GNU GPL, see COPYING for details.
 """
 
--- a/MoinMoin/action/Despam.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/Despam.py	Mon Aug 21 02:31:07 2006 +0200
@@ -45,7 +45,12 @@
                        Column('pages', label=_("Pages"), align='right'),
                        Column('link', label='', align='left')]
     for nr, editor in editors:
-        dataset.addRow((editor, unicode(nr), pg.link_to(request, text=_("Select Author"), querystr="action=Despam&editor=%s" % wikiutil.url_quote_plus(editor))))
+        dataset.addRow((editor, unicode(nr),
+            pg.link_to(request, text=_("Select Author"),
+                querystr={
+                    'action': 'Despam',
+                    'editor': editor, # was: url_quote_plus()
+                })))
 
     table = DataBrowserWidget(request)
     table.setData(dataset)
--- a/MoinMoin/action/LocalSiteMap.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/LocalSiteMap.py	Mon Aug 21 02:31:07 2006 +0200
@@ -64,12 +64,14 @@
     def visit(self, request, name, depth):
         """ Visit a page, i.e. create a link.
         """
-        if not name: return
-        self.append('&nbsp;' * (5*depth))
-        self.append('&nbsp;' + wikiutil.link_tag(request, '%s?action=%s' %
-            (wikiutil.quoteWikinameURL(name), __name__.split('.')[-1]), name))
+        if not name:
+            return
+        pg = Page(request, name)
+        action = __name__.split('.')[-1]
+        self.append('&nbsp;' * (5*depth+1))
+        self.append(pg.link_to(request, name, querystr={'action': action}))
         self.append("&nbsp;<small>[")
-        self.append(Page(request, name).link_to(request, 'view'))
+        self.append(pg.link_to(request, 'view'))
         self.append("</small>]<br>")
 
     def append(self, text):
--- a/MoinMoin/action/RenamePage.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/RenamePage.py	Mon Aug 21 02:31:07 2006 +0200
@@ -54,7 +54,7 @@
 
     def do_action_finish(self, success):
         if success:
-            url = Page(self.request, self.newpagename).url(self.request)
+            url = Page(self.request, self.newpagename).url(self.request, escape=0, relative=False)
             self.request.http_redirect(url)
             self.request.finish()
         else:
--- a/MoinMoin/action/RenderAsDocbook.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/RenderAsDocbook.py	Mon Aug 21 02:31:07 2006 +0200
@@ -8,7 +8,6 @@
 from MoinMoin.Page import Page
 
 def execute(pagename, request):
-    url = Page(request, pagename).url(request, {'action': 'show',
-                                                'mimetype': 'text/docbook'}, 0)
+    url = Page(request, pagename).url(request, {'action': 'show', 'mimetype': 'text/docbook'}, escape=0, relative=False)
     request.http_redirect(url)
 
--- a/MoinMoin/action/SubscribeUser.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/SubscribeUser.py	Mon Aug 21 02:31:07 2006 +0200
@@ -74,7 +74,7 @@
             else:
                 result.append(formatter.smiley('{X}'))
                 result.append(formatter.text(" "))
-            result.append(formatter.url(1, Page(request, userobj.name).url(request)))
+            result.append(formatter.url(1, Page(request, userobj.name).url(request, escape=0)))
             result.append(formatter.text(userobj.name))
             result.append(formatter.url(0))
             result.append(formatter.linebreak(preformatted=0))
--- a/MoinMoin/action/SyncPages.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Mon Aug 21 02:31:07 2006 +0200
@@ -48,7 +48,6 @@
         self.pagename = pagename
         self.page = PageEditor(request, pagename)
         self.status = []
-        request.flush()
 
     def log_status(self, level, message="", substitutions=(), raw_suffix=""):
         """ Appends the message with a given importance level to the internal log. """
@@ -62,7 +61,7 @@
         for line in self.status:
             macro_args = [line[1]] + list(line[2])
             table.append(table_line % {"smiley": line[0][1], "message":
-                macro_args and u"[[GetText2(|%s)]]" % (packLine(macro_args), ),
+                line[1] and (u"[[GetText2(|%s)]]" % (packLine(macro_args), )),
                 "raw_suffix": line[3]})
 
         return "\n".join(table)
@@ -142,18 +141,18 @@
         except ActionStatus, e:
             msg = u'<p class="error">%s</p>\n' % (e.args[0], )
         else:
-            msg = u"%s" % (_("Syncronisation finished."), )
+            msg = u"%s" % (_("Syncronisation finished. Look below for the status messages."), )
 
         self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0)
         # XXX release readlock on self.page
 
-        return self.page.send_page(self.request, msg=msg)
-    
+        self.page.send_page(self.request, msg=msg)
+
     def sync(self, params, local, remote):
         """ This method does the syncronisation work.
-            Currently, it handles the case where the pages exist on both sides.
-            One of the major missing parts is rename handling.
-            Now there are a few other cases left that have to be implemented:
+            Currently, it handles nearly all cases.
+            The major missing part is rename handling.
+            There are a few other cases left that have to be implemented:
                 Wiki A    | Wiki B   | Remark
                 ----------+----------+------------------------------
                 exists    | non-     | Now the wiki knows that the page was renamed.
@@ -206,12 +205,12 @@
             m_pages = SyncPage.filter(m_pages, params["pageMatch"].match)
             self.log_status(self.INFO, _("After filtering: %s pages"), (str(len(m_pages)), ))
 
-        def handle_page(rp):
-            # XXX add locking, acquire read-lock on rp
+        def handle_page(sp):
+            # XXX add locking, acquire read-lock on sp
             if debug:
-                self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % rp)
+                self.log_status(ActionClass.INFO, raw_suffix="Processing %r" % sp)
 
-            local_pagename = rp.local_name
+            local_pagename = sp.local_name
             current_page = PageEditor(self.request, local_pagename) # YYY direct access
             comment = u"Local Merge - %r" % (remote.get_interwiki_name() or remote.get_iwid())
 
@@ -225,49 +224,51 @@
             # some default values for non matching tags
             normalised_name = None
             remote_rev = None
-            local_rev = rp.local_rev # merge against the newest version
+            local_rev = sp.local_rev # merge against the newest version
             old_contents = ""
 
             if matching_tags:
                 newest_tag = matching_tags[-1]
 
-                local_change = newest_tag.current_rev != rp.local_rev
-                remote_change = newest_tag.remote_rev != rp.remote_rev
+                local_change = newest_tag.current_rev != sp.local_rev
+                remote_change = newest_tag.remote_rev != sp.remote_rev
 
                 # handle some cases where we cannot continue for this page
                 if not remote_change and (direction == DOWN or not local_change):
                     return # no changes done, next page
-                if rp.local_deleted and rp.remote_deleted:
+                if sp.local_deleted and sp.remote_deleted:
                     return
-                if rp.remote_deleted and not local_change:
-                    msg = local.delete_page(rp.local_name, comment)
+                if sp.remote_deleted and not local_change:
+                    msg = local.delete_page(sp.local_name, comment)
                     if not msg:
-                        self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (rp.name, ))
+                        self.log_status(ActionClass.INFO, _("Deleted page %s locally."), (sp.name, ))
                     else:
-                        self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (rp.name, ), msg)
+                        self.log_status(ActionClass.ERROR, _("Error while deleting page %s locally:"), (sp.name, ), msg)
                     return
-                if rp.local_deleted and not remote_change:
+                if sp.local_deleted and not remote_change:
                     if direction == DOWN:
                         return
-                    self.log_status(ActionClass.ERROR, "Nothing done, I should have deleted %r remotely" % rp) # XXX add
-                    msg = remote.delete_page(rp.remote_name)
-                    self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (rp.name, ))
+                    msg = remote.delete_page(sp.remote_name, sp.remote_rev, local_full_iwid)
+                    if not msg:
+                        self.log_status(ActionClass.INFO, _("Deleted page %s remotely."), (sp.name, ))
+                    else:
+                        self.log_status(ActionClass.ERROR, _("Error while deleting page %s remotely:"), (sp.name, ), msg)
                     return
-                if rp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change):
-                    self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (rp.name, ))
+                if sp.local_mime_type != MIMETYPE_MOIN and not (local_change ^ remote_change):
+                    self.log_status(ActionClass.WARN, _("The item %s cannot be merged but was changed in both wikis. Please delete it in one of both wikis and try again."), (sp.name, ))
                     return
-                if rp.local_mime_type != rp.remote_mime_type:
-                    self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (rp.name, ))
+                if sp.local_mime_type != sp.remote_mime_type:
+                    self.log_status(ActionClass.WARN, _("The item %s has different mime types in both wikis and cannot be merged. Please delete it in one of both wikis or unify the mime type, and try again."), (sp.name, ))
                     return
-                if newest_tag.normalised_name != rp.name:
-                    self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full syncronisation history is lost for this page."), (rp.name, )) # XXX implement renames
+                if newest_tag.normalised_name != sp.name:
+                    self.log_status(ActionClass.WARN, _("The item %s was renamed locally. This is not implemented yet. Therefore the full syncronisation history is lost for this page."), (sp.name, )) # XXX implement renames
                 else:
                     normalised_name = newest_tag.normalised_name
                     local_rev = newest_tag.current_rev
                     remote_rev = newest_tag.remote_rev
                     old_contents = Page(self.request, local_pagename, rev=newest_tag.current_rev).get_raw_body_str() # YYY direct access
 
-            self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, rp.remote_name))
+            self.log_status(ActionClass.INFO, _("Synchronising page %s with remote page %s ..."), (local_pagename, sp.remote_name))
 
             if direction == DOWN:
                 remote_rev = None # always fetch the full page, ignore remote conflict check
@@ -275,16 +276,16 @@
             else:
                 patch_base_contents = old_contents
 
-            if remote_rev != rp.remote_rev:
-                if rp.remote_deleted: # ignore remote changes
-                    current_remote_rev = rp.remote_rev
+            if remote_rev != sp.remote_rev:
+                if sp.remote_deleted: # ignore remote changes
+                    current_remote_rev = sp.remote_rev
                     is_remote_conflict = False
                     diff = None
-                    self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (rp.name, ))
+                    self.log_status(ActionClass.WARN, _("The page %s was deleted remotely but changed locally."), (sp.name, ))
                 else:
-                    diff_result = remote.get_diff(rp.remote_name, remote_rev, None, normalised_name)
+                    diff_result = remote.get_diff(sp.remote_name, remote_rev, None, normalised_name)
                     if diff_result is None:
-                        self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (rp.remote_name, ))
+                        self.log_status(ActionClass.ERROR, _("The page %s could not be synced. The remote page was renamed. This is not supported yet. You may want to delete one of the pages to get it synced."), (sp.remote_name, ))
                         return
                     is_remote_conflict = diff_result["conflict"]
                     assert diff_result["diffversion"] == 1
@@ -292,7 +293,7 @@
                     current_remote_rev = diff_result["current"]
             else:
                 current_remote_rev = remote_rev
-                if rp.local_mime_type == MIMETYPE_MOIN:
+                if sp.local_mime_type == MIMETYPE_MOIN:
                     is_remote_conflict = wikiutil.containsConflictMarker(old_contents.decode("utf-8"))
                 else:
                     is_remote_conflict = NotImplemented
@@ -300,7 +301,7 @@
 
             # do not sync if the conflict is remote and local, or if it is local
             # and the page has never been syncronised
-            if (rp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body())
+            if (sp.local_mime_type == MIMETYPE_MOIN and wikiutil.containsConflictMarker(current_page.get_raw_body()) # YYY direct access
                 and (remote_rev is None or is_remote_conflict)):
                 self.log_status(ActionClass.WARN, _("Skipped page %s because of a locally or remotely unresolved conflict."), (local_pagename, ))
                 return
@@ -308,60 +309,61 @@
             if remote_rev is None and direction == BOTH:
                 self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki."))
 
-            if rp.remote_deleted:
-                new_contents = ""
+            if sp.remote_deleted:
+                remote_contents = ""
             elif diff is None:
-                new_contents = old_contents
+                remote_contents = old_contents
             else:
-                new_contents = patch(patch_base_contents, decompress(diff))
+                remote_contents = patch(patch_base_contents, decompress(diff))
 
-            if rp.local_mime_type == MIMETYPE_MOIN:
-                new_contents_unicode = new_contents.decode("utf-8")
+            if sp.local_mime_type == MIMETYPE_MOIN:
+                remote_contents_unicode = remote_contents.decode("utf-8")
                 # here, the actual 3-way merge happens
                 if debug:
-                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body()))
-                verynewtext = diff3.text_merge(old_contents.decode("utf-8"), new_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers)
-                verynewtext_raw = verynewtext.encode("utf-8")
+                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body()))
+                merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) # YYY direct access
+                merged_text_raw = merged_text.encode("utf-8")
             else:
                 if diff is None:
-                    verynewtext_raw = new_contents
+                    merged_text_raw = remote_contents
                 else:
-                    verynewtext_raw = current_page.get_raw_body_str()
+                    merged_text_raw = current_page.get_raw_body_str() # YYY direct access
 
-            diff = textdiff(new_contents, verynewtext_raw)
+            diff = textdiff(remote_contents, merged_text_raw)
             if debug:
-                self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % new_contents)
+                self.log_status(ActionClass.INFO, raw_suffix="Diff against %r" % remote_contents)
 
             # XXX upgrade to write lock
             try:
-                current_page.saveText(verynewtext, rp.local_rev, comment=comment) # YYY direct access
+                current_page.saveText(merged_text, sp.local_rev, comment=comment) # YYY direct access
             except PageEditor.Unchanged:
                 pass
             except PageEditor.EditConflict:
                 assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"
 
-            new_local_rev = current_page.get_real_rev()
+            new_local_rev = current_page.get_real_rev() # YYY direct access
 
             if direction == BOTH:
                 try:
-                    very_current_remote_rev = remote.merge_diff(rp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, rp.name)
+                    very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name)
                 except Exception, e:
                     raise # XXX rollback locally and do not tag locally
             else:
                 very_current_remote_rev = current_remote_rev
 
-            tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=rp.name)
+            tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name)
 
-            if rp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(verynewtext):
+            if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(merged_text):
                 self.log_status(ActionClass.INFO, _("Page successfully merged."))
             else:
                 self.log_status(ActionClass.WARN, _("Page merged with conflicts."))
 
             # XXX release lock
 
-        for rp in m_pages:
-            handle_page(rp)
+        for sp in m_pages:
+            handle_page(sp)
 
 
 def execute(pagename, request):
     ActionClass(pagename, request).render()
+
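
The merge step in handle_page() above calls diff3.text_merge() on the old, remote and local revisions with custom conflict markers. An illustrative sketch of that 3-way merge in isolation, assuming the bundled module is importable as MoinMoin.util.diff3 and using made-up contents and markers:

    from MoinMoin.util import diff3

    old    = u"line 1\nline 2\n"                 # last synchronised revision
    remote = u"line 1\nline 2 remote edit\n"     # remote contents after patching
    local  = u"line 0\nline 1\nline 2\n"         # current local page body
    conflict_markers = (u"<<<<<<< local\n", u"=======\n", u">>>>>>> remote\n")
    merged = diff3.text_merge(old, remote, local, 2, *conflict_markers)
    print merged
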
--- a/MoinMoin/action/__init__.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/__init__.py	Mon Aug 21 02:31:07 2006 +0200
@@ -277,7 +277,7 @@
 def do_goto(pagename, request):
     """ redirect to another page """
     target = request.form.get('target', [''])[0]
-    request.http_redirect(Page(request, target).url(request))
+    request.http_redirect(Page(request, target).url(request, escape=0, relative=False))
 
 def do_userform(pagename, request):
     """ save data posted from UserPreferences """
--- a/MoinMoin/action/diff.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/diff.py	Mon Aug 21 02:31:07 2006 +0200
@@ -143,9 +143,11 @@
             if ignorews:
                 request.write(_('(ignoring whitespace)') + '<br>')
             else:
-                qstr = 'action=diff&ignorews=1'
-                if rev1: qstr = '%s&rev1=%s' % (qstr, rev1)
-                if rev2: qstr = '%s&rev2=%s' % (qstr, rev2)
+                qstr = {'action': 'diff', 'ignorews': '1', }
+                if rev1:
+                    qstr['rev1'] = str(rev1)
+                if rev2:
+                    qstr['rev2'] = str(rev2)
                 request.write(Page(request, pagename).link_to(request,
                     text=_('Ignore changes in the amount of whitespace'),
                     querystr=qstr, rel='nofollow') + '<p>')
--- a/MoinMoin/action/fullsearch.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/fullsearch.py	Mon Aug 21 02:31:07 2006 +0200
@@ -137,9 +137,7 @@
         page = results.hits[0]
         if not page.attachment: # we did not find an attachment
             page = Page(request, page.page_name)
-            # TODO: remove escape=0 in 2.0
-            url = page.url(request, querystr={'highlight': query.highlight_re()},
-                           escape=0)
+            url = page.url(request, querystr={'highlight': query.highlight_re()}, escape=0, relative=False)
             request.http_redirect(url)
             return
 
--- a/MoinMoin/action/info.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/info.py	Mon Aug 21 02:31:07 2006 +0200
@@ -197,7 +197,6 @@
     # main function
     _ = request.getText
     page = Page(request, pagename)
-    qpagename = wikiutil.quoteWikinameURL(pagename)
     title = page.split_title(request)
 
     request.emit_http_headers()
--- a/MoinMoin/action/newpage.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/action/newpage.py	Mon Aug 21 02:31:07 2006 +0200
@@ -89,7 +89,7 @@
             if parent:
                 pagename = "%s/%s" % (parent, pagename)
 
-            url = Page(self.request, pagename).url(self.request, query, 0)
+            url = Page(self.request, pagename).url(self.request, query, escape=0, relative=False)
             self.request.http_redirect(url)
 
         return ''
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/action/showtags.py	Mon Aug 21 02:31:07 2006 +0200
@@ -0,0 +1,23 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - "showtags" action
+
+    This action shows all sync tags related to a specific page.
+
+    @copyright: 2006 by MoinMoin:AlexanderSchremmer
+    @license: GNU GPL, see COPYING for details.
+"""
+
+from MoinMoin import config
+from MoinMoin.Page import Page
+from MoinMoin.wikisync import TagStore
+
+def execute(pagename, request):
+    mimetype = "text/plain"
+
+    request.emit_http_headers(["Content-Type: %s; charset=%s" % (mimetype, config.charset)])
+
+    page = Page(request, pagename)
+    tags = TagStore(page)
+    request.write(tags.dump())
+
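
The new showtags action simply dumps the page's TagStore as text/plain, which is handy for debugging synchronisation. One illustrative way to fetch the dump, assuming a wiki running at http://localhost:8080 (a placeholder URL):

    import urllib
    # ?action=showtags works with or without url_prefix_action configured
    print urllib.urlopen('http://localhost:8080/FrontPage?action=showtags').read()
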
--- a/MoinMoin/config/multiconfig.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/config/multiconfig.py	Mon Aug 21 02:31:07 2006 +0200
@@ -420,8 +420,7 @@
     url_prefix_static = '/moin_static160'
 
     # we need to prefix actions to be able to exclude them by robots.txt:
-    # TODO:
-    # url_prefix_action = '/action'
+    url_prefix_action = 'action' # no leading or trailing '/'
 
     logo_string = None
     interwikiname = None
@@ -583,6 +582,10 @@
         if self.url_prefix is not None: # remove this code when url_prefix setting is removed
             self.url_prefix_static = self.url_prefix
 
+        action_prefix = self.url_prefix_action
+        if action_prefix is not None and action_prefix.endswith('/'): # make sure there is no trailing '/'
+            self.url_prefix_action = action_prefix[:-1]
+
     def load_meta_dict(self):
         """ The meta_dict contains meta data about the wiki instance. """
         if getattr(self, "_meta_dict", None) is None:
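
With url_prefix_action set by default, action links generated by Page.url() take the form action/<action>/<PageName>, so crawlers can be kept out of all actions with a single rule. A hedged configuration sketch (import path and class name as in MoinMoin/config/multiconfig.py; the site name is a placeholder):

    # wikiconfig.py
    from MoinMoin.config.multiconfig import DefaultConfig

    class Config(DefaultConfig):
        sitename = u'My Wiki'
        url_prefix_action = 'action'   # no leading or trailing '/'

    # matching robots.txt entry served at the site root:
    #   User-agent: *
    #   Disallow: /action/
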
--- a/MoinMoin/macro/Action.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/macro/Action.py	Mon Aug 21 02:31:07 2006 +0200
@@ -66,11 +66,8 @@
         text = wikiutil.escape(text, 1)
 
         # Create link
-        formatter = self.macro.formatter
-        page = wikiutil.quoteWikinameURL(formatter.page.page_name)
-        url = '%s?action=%s' % (page, action)
-        link = wikiutil.link_tag(self.request, url, text=text,
-                                 formatter=formatter)
+        page = self.macro.formatter.page
+        link = page.link_to(self.request, text, querystr='action=%s' % action)
         return link
 
 
--- a/MoinMoin/macro/EditTemplates.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/macro/EditTemplates.py	Mon Aug 21 02:31:07 2006 +0200
@@ -15,23 +15,16 @@
     # Get list of template pages readable by current user
     filter = re.compile(self.request.cfg.page_template_regex, re.UNICODE).search
     templates = self.request.rootpage.getPageList(filter=filter)
-
+    result = []
     if templates:
         templates.sort()
-
+        page = self.formatter.page
         # send list of template pages
-        result = self.formatter.bullet_list(1)
-        for page in templates:
-            result = result +\
-                     self.formatter.listitem(1) +\
-                     wikiutil.link_tag(self.request, "%s?action=edit&amp;template=%s" % (
-                        wikiutil.quoteWikinameURL(self.formatter.page.page_name),
-                        wikiutil.quoteWikinameURL(page)), page
-                     ) + \
-                     self.formatter.listitem(0)
+        result.append(self.formatter.bullet_list(1))
+        for template in templates:
+            result.append(self.formatter.listitem(1))
+            result.append(page.link_to(self.request, template, querystr={'action': 'edit', 'template': template}))
+            result.append(self.formatter.listitem(0))
+        result.append(self.formatter.bullet_list(0))
+    return ''.join(result)
 
-        result = result + self.formatter.bullet_list(0)
-        return result
-
-    return ''
-
--- a/MoinMoin/macro/MonthCalendar.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/macro/MonthCalendar.py	Mon Aug 21 02:31:07 2006 +0200
@@ -312,10 +312,10 @@
     qpagenames = '*'.join(map(wikiutil.quoteWikinameURL, parmpagename))
     qtemplate = wikiutil.quoteWikinameURL(parmtemplate)
     querystr = "calparms=%%s,%d,%d,%d,%%d,%%s" % (parmyear, parmmonth, parmoffset)
-    prevlink = p.url(request, querystr % (qpagenames, parmoffset2 - 1, qtemplate), 0)
-    nextlink = p.url(request, querystr % (qpagenames, parmoffset2 + 1, qtemplate), 0)
-    prevylink = p.url(request, querystr % (qpagenames, parmoffset2 - 12, qtemplate), 0)
-    nextylink = p.url(request, querystr % (qpagenames, parmoffset2 + 12, qtemplate), 0)
+    prevlink = p.url(request, querystr % (qpagenames, parmoffset2 - 1, qtemplate), escape=0)
+    nextlink = p.url(request, querystr % (qpagenames, parmoffset2 + 1, qtemplate), escape=0)
+    prevylink = p.url(request, querystr % (qpagenames, parmoffset2 - 12, qtemplate), escape=0)
+    nextylink = p.url(request, querystr % (qpagenames, parmoffset2 + 12, qtemplate), escape=0)
     prevmonth = formatter.url(1, prevlink, 'cal-link') + '&lt;' + formatter.url(0)
     nextmonth = formatter.url(1, nextlink, 'cal-link') + '&gt;' + formatter.url(0)
     prevyear = formatter.url(1, prevylink, 'cal-link') + '&lt;&lt;' + formatter.url(0)
@@ -428,7 +428,7 @@
                             r, g, b = (r, g+colorstep, b)
                 r, g, b = cliprgb(r, g, b)
                 style = 'background-color:#%02x%02x%02x' % (r, g, b)
-                fmtlink = formatter.url(1, daypage.url(request, query), csslink, **onmouse) + str(day) + formatter.url(0)
+                fmtlink = formatter.url(1, daypage.url(request, query, escape=0), csslink, **onmouse) + str(day) + formatter.url(0)
                 if day == currentday and month == currentmonth and year == currentyear:
                     cssday = "cal-today"
                     fmtlink = "<b>%s</b>" % fmtlink # for browser with CSS probs
--- a/MoinMoin/macro/Navigation.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/macro/Navigation.py	Mon Aug 21 02:31:07 2006 +0200
@@ -64,7 +64,7 @@
     """
 
     # querystring for slideshow links
-    PROJECTION = 'action=print&media=projection'
+    PROJECTION = {'action': 'print', 'media': 'projection', }
 
     def __init__(self, macro, args):
         """ Prepare common values used during processing.
@@ -76,7 +76,7 @@
         self.pagename = self.macro.formatter.page.page_name
         self.print_mode = self.macro.request.action == 'print'
         self.media = self.macro.request.form.get('media', [None])[0]
-        self.querystr = self.print_mode and self.PROJECTION or ''
+        self.querystr = self.print_mode and self.PROJECTION or {}
 
 
     def dispatch(self):
@@ -113,6 +113,7 @@
         """ Navigate from a subpage to its siblings.
         """
         _ = self._
+        request = self.macro.request
         # get parent page name
         parent = root or _getParent(self.pagename)
         if not parent:
@@ -127,7 +128,7 @@
 
         # iterate over children, adding links to all of them
         result = []
-        children = _getPages(self.macro.request, '^%s/' % parent)
+        children = _getPages(request, '^%s/' % parent)
         for child in children:
             # display short page name, leaving out the parent path
             # (and make sure the name doesn't get wrapped)
@@ -142,7 +143,7 @@
                 result.append(self.macro.formatter.text(shortname))
             else:
                 # link to sibling / child
-                result.append(Page(self.macro.request, child).link_to(self.macro.request, text=shortname, querystr=self.querystr))
+                result.append(Page(request, child).link_to(request, text=shortname, querystr=self.querystr))
             result.append(' &nbsp; ')
 
         return ''.join(result)
@@ -157,12 +158,13 @@
         _ = self._
         curpage = focus or self.pagename
         result = []
-
+        request = self.macro.request
+        pg = Page(request, curpage)
         if self.print_mode:
             # projection mode
             label = _('Wiki')
-            toggle = ''
-            result.append(Page(self.macro.request, curpage).link_to(self.macro.request, text=_('Edit'), querystr='action=edit'))
+            toggle = {}
+            result.append(pg.link_to(request, text=_('Edit'), querystr={'action': 'edit'}))
             result.append(' &nbsp; ')
         else:
             # wiki mode
@@ -170,15 +172,15 @@
             toggle = self.PROJECTION
 
         # add mode toggle link
-        result.append(Page(self.macro.request, curpage).link_to(self.macro.request, text=label, querystr=toggle))
+        result.append(pg.link_to(request, text=label, querystr=toggle))
 
         # leave out the following on slide pages
         if focus is None:
-            children = _getPages(self.macro.request, '^%s/' % self.pagename)
+            children = _getPages(request, '^%s/' % self.pagename)
             if children:
                 # add link to first child if one exists
                 result.append(' &nbsp; ')
-                result.append(Page(self.macro.request, children[0]).link_to(self.macro.request, text=_('Start'), querystr=self.querystr))
+                result.append(Page(request, children[0]).link_to(request, text=_('Start'), querystr=self.querystr))
 
         return ''.join(result)
 
@@ -187,6 +189,7 @@
         """ Navigate within a slide show.
         """
         _ = self._
+        request = self.macro.request
         parent = root or _getParent(self.pagename)
         if not parent:
             return (self.macro.formatter.sysmsg(1) +
@@ -197,7 +200,7 @@
         result = []
         labels = ['^', '|<', '<<', '>>', '>|']
         filter_regex = '^%s/' % re.escape(parent)
-        pos, size, links = _getLinks(self.macro.request, self.pagename, filter_regex)
+        pos, size, links = _getLinks(request, self.pagename, filter_regex)
         pos += 1
         links = zip(labels, (parent,) + links)
 
@@ -206,7 +209,7 @@
             result.append(' ')
             if name:
                 # active link
-                result.append(Page(self.macro.request, name).link_to(self.macro.request, text=label, querystr=self.querystr))
+                result.append(Page(request, name).link_to(request, text=label, querystr=self.querystr))
             else:
                 # ghosted link
                 result.append(self.macro.formatter.text(label))
--- a/MoinMoin/macro/RecentChanges.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/macro/RecentChanges.py	Mon Aug 21 02:31:07 2006 +0200
@@ -54,36 +54,29 @@
     is_new = lines[-1].action == 'SAVENEW'
     # check whether this page is newer than the user's bookmark
     hilite = line.ed_time_usecs > (bookmark_usecs or line.ed_time_usecs)
-    page = Page(request, line.pagename)
+    page = Page(request, pagename)
 
     html_link = ''
     if not page.exists():
         # indicate page was deleted
-        html_link = request.theme.make_icon('deleted')
+        html_link = request.theme.make_icon('deleted') # TODO: we could link to the last existing rev here
     elif page.isConflict():
         img = macro.formatter.smiley("/!\\")
         #img = request.theme.make_icon('help')
-        html_link = wikiutil.link_tag(request,
-                                      wikiutil.quoteWikinameURL(pagename) + "?action=edit",
-                                      img, formatter=macro.formatter, rel="nofollow")
+        html_link = page.link_to_raw(request, img, querystr={'action': 'edit'}, rel='nofollow')
     elif is_new:
         # show "NEW" icon if page was created after the user's bookmark
         if hilite:
             img = request.theme.make_icon('new')
-            html_link = wikiutil.link_tag(request, wikiutil.quoteWikinameURL(pagename),
-                                          img, formatter=macro.formatter, rel="nofollow")
+            html_link = page.link_to_raw(request, img, rel='nofollow')
     elif hilite:
         # show "UPDATED" icon if page was edited after the user's bookmark
         img = request.theme.make_icon('updated')
-        html_link = wikiutil.link_tag(request,
-                                      wikiutil.quoteWikinameURL(pagename) + "?action=diff&date=%d" % bookmark_usecs,
-                                      img, formatter=macro.formatter, rel="nofollow")
+        html_link = page.link_to_raw(request, img, querystr={'action': 'diff', 'date': '%d' % bookmark_usecs}, rel='nofollow')
     else:
         # show "DIFF" icon else
         img = request.theme.make_icon('diffrc')
-        html_link = wikiutil.link_tag(request,
-                                      wikiutil.quoteWikinameURL(line.pagename) + "?action=diff",
-                                      img, formatter=macro.formatter, rel="nofollow")
+        html_link = page.link_to_raw(request, img, querystr={'action': 'diff'}, rel='nofollow')
 
     # print name of page, with a link to it
     force_split = len(page.page_name) > _MAX_PAGENAME_LENGTH
@@ -131,10 +124,7 @@
     d['comments'] = comments
 
     img = request.theme.make_icon('info')
-    info_html = wikiutil.link_tag(request,
-                                  wikiutil.quoteWikinameURL(line.pagename) + "?action=info",
-                                  img, formatter=macro.formatter, rel="nofollow")
-    d['info_html'] = info_html
+    d['info_html'] = page.link_to_raw(request, img, querystr={'action': 'info'}, rel='nofollow')
 
     return request.theme.recentchanges_entry(d)
 
@@ -256,17 +246,11 @@
             currentBookmark = wikiutil.version2timestamp(bookmark_usecs)
             currentBookmark = user.getFormattedDateTime(currentBookmark)
             currentBookmark = _('(currently set to %s)') % currentBookmark
-
-            url = wikiutil.quoteWikinameURL(pagename) + "?action=bookmark&time=del"
-            deleteBookmark = wikiutil.link_tag(request, url, _("Delete Bookmark"),
-                                               formatter=macro.formatter, rel="nofollow")
+            deleteBookmark = page.link_to(request, _("Delete bookmark"), querystr={'action': 'bookmark', 'time': 'del'}, rel='nofollow')
             d['rc_curr_bookmark'] = currentBookmark + ' ' + deleteBookmark
 
         version = wikiutil.timestamp2version(tnow)
-        url = wikiutil.quoteWikinameURL(pagename) + \
-            "?action=bookmark&time=%d" % version
-        d['rc_update_bookmark'] = wikiutil.link_tag(request, url, _("Set bookmark"),
-                                                    formatter=macro.formatter, rel="nofollow")
+        d['rc_update_bookmark'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % version}, rel='nofollow')
 
     # set max size in days
     max_days = min(int(request.form.get('max_days', [0])[0]), _DAYS_SELECTION[-1])
@@ -302,26 +286,22 @@
         if ((this_day != day or (not hilite and not max_days))) and len(pages) > 0:
             # new day or bookmark reached: print out stuff 
             this_day = day
-            for page in pages:
-                ignore_pages[page] = None
+            for p in pages:
+                ignore_pages[p] = None
             pages = pages.values()
             pages.sort(cmp_lines)
             pages.reverse()
 
             if request.user.valid:
-                d['bookmark_link_html'] = wikiutil.link_tag(
-                    request,
-                    wikiutil.quoteWikinameURL(
-                        macro.formatter.page.page_name) + "?action=bookmark&time=%d" % (pages[0][0].ed_time_usecs,),
-                        _("set bookmark"),
-                        formatter=macro.formatter, rel="nofollow")
+                bmtime = pages[0][0].ed_time_usecs
+                d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
             else:
                 d['bookmark_link_html'] = None
             d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
             request.write(request.theme.recentchanges_daybreak(d))
 
-            for page in pages:
-                request.write(format_page_edits(macro, page, bookmark_usecs))
+            for p in pages:
+                request.write(format_page_edits(macro, p, bookmark_usecs))
             pages = {}
             day_count += 1
             if max_days and (day_count >= max_days):
@@ -348,26 +328,22 @@
             # end of loop reached: print out stuff 
             # XXX duplicated code from above
             # but above does not trigger if we have the first day in wiki history
-            for page in pages:
-                ignore_pages[page] = None
+            for p in pages:
+                ignore_pages[p] = None
             pages = pages.values()
             pages.sort(cmp_lines)
             pages.reverse()
 
             if request.user.valid:
-                d['bookmark_link_html'] = wikiutil.link_tag(
-                    request,
-                    wikiutil.quoteWikinameURL(
-                        macro.formatter.page.page_name) + "?action=bookmark&time=%d" % (pages[0][0].ed_time_usecs,),
-                        _("Set bookmark"),
-                        formatter=macro.formatter, rel="nofollow")
+                bmtime = pages[0][0].ed_time_usecs
+                d['bookmark_link_html'] = page.link_to(request, _("Set bookmark"), querystr={'action': 'bookmark', 'time': '%d' % bmtime}, rel='nofollow')
             else:
                 d['bookmark_link_html'] = None
             d['date'] = request.user.getFormattedDate(wikiutil.version2timestamp(pages[0][0].ed_time_usecs))
             request.write(request.theme.recentchanges_daybreak(d))
 
-            for page in pages:
-                request.write(format_page_edits(macro, page, bookmark_usecs))
+            for p in pages:
+                request.write(format_page_edits(macro, p, bookmark_usecs))
 
 
     d['rc_msg'] = msg
--- a/MoinMoin/macro/WantedPages.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/macro/WantedPages.py	Mon Aug 21 02:31:07 2006 +0200
@@ -23,13 +23,9 @@
 
     # Control bar - filter the list of pages
     # TODO: we should make this a widget and use on all page listing pages
-    controlbar = '''<div class="controlbar">
-<a href="%(qpagename)s?allpages=%(allpages)d">%(label)s</a>
-</div>''' % {
-        'qpagename': wikiutil.quoteWikinameURL(macro.formatter.page.page_name),
-        'allpages': not allpages,
-        'label': (_('Include system pages'), _('Exclude system pages'))[allpages],
-    }
+    label = (_('Include system pages'), _('Exclude system pages'))[allpages]
+    page = macro.formatter.page
+    controlbar = '<div class="controlbar">%s</div>' % page.link_to(request, label, querystr={'allpages': '%d' % allpages and '0' or '1'})
 
     # Get page dict readable by current user
     pages = request.rootpage.getPageDict()
--- a/MoinMoin/macro/__init__.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/macro/__init__.py	Mon Aug 21 02:31:07 2006 +0200
@@ -245,11 +245,10 @@
             links = [letter_link(letter) for letter in index_letters]
             return "<p>%s%s</p>" % (' | '.join(links), additional_html)
 
-        qpagename = wikiutil.quoteWikinameURL(self.formatter.page.page_name)
+        page = self.formatter.page
         allpages_txt = (_('Include system pages'), _('Exclude system pages'))[allpages]
-        index = _make_index_key(index_letters, u"""<br>
-<a href="%s?allpages=%d">%s</a>
-""" % (qpagename, not allpages, allpages_txt) )
+        allpages_link = page.link_to(self.request, allpages_txt, querystr={'allpages': allpages and '0' or '1'})
+        index = _make_index_key(index_letters, u'<br>%s' % allpages_link)
         # ?action=titleindex and ?action=titleindex&mimetype=text/xml removed
 
         return u'%s%s' % (index, u''.join(html))
--- a/MoinMoin/parser/text_moin_wiki.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/parser/text_moin_wiki.py	Mon Aug 21 02:31:07 2006 +0200
@@ -2,7 +2,8 @@
 """
     MoinMoin - MoinMoin Wiki Markup Parser
 
-    @copyright: 2000, 2001, 2002 by Jürgen Hermann <jh@web.de>
+    @copyright: 2000, 2001, 2002 by Jürgen Hermann <jh@web.de>,
+                2006 by MoinMoin:ThomasWaldmann
     @license: GNU GPL, see COPYING for details.
 """
 
@@ -38,9 +39,9 @@
     q_string = ur"(%s|%s)" % (sq_string, dq_string) # quoted string
     attachment_schemas = ["attachment", "inline", "drawing"]
     punct_pattern = re.escape(u'''"\'}]|:,.)?!''')
-    punct_no_quote_pattern = re.escape(u'''\}]|:,.)?!''')
+    punct_no_quote_pattern = re.escape(u'''}]|:,.)?!''')
     url_pattern = (u'http|https|ftp|nntp|news|mailto|telnet|wiki|file|irc|' +
-            u'|'.join(attachment_schemas) + 
+            u'|'.join(attachment_schemas) +
             (config.url_schemas and u'|' + u'|'.join(config.url_schemas) or ''))
 
     # some common rules
@@ -51,7 +52,7 @@
         'parent': ur'(?:%s)?' % re.escape(PARENT_PREFIX),
     }
     url_rule = ur'%(url_guard)s(%(url)s)\:(([^\s\<%(punct)s]|([%(punctnq)s][^\s\<%(punct)s]))+|%(q_string)s)' % {
-        'url_guard': u'(^|(?<!\w))',
+        'url_guard': ur'(^|(?<!\w))',
         'url': url_pattern,
         'punct': punct_pattern,
         'punctnq': punct_no_quote_pattern,
@@ -143,7 +144,7 @@
         # holds the nesting level (in chars) of open lists
         self.list_indents = []
         self.list_types = []
-        
+
         self.formatting_rules = self.formatting_rules % {'macronames': u'|'.join(macro.getNames(self.cfg))}
 
     def _close_item(self, result):
@@ -182,7 +183,7 @@
             #self.request.log("interwiki: join_wiki -> %s.%s.%s" % (wikiurl,pagename,href))
             return self.formatter.image(src=href)
 
-        return (self.formatter.interwikilink(1, wikiname, pagename) + 
+        return (self.formatter.interwikilink(1, wikiname, pagename) +
                 self.formatter.text(text) +
                 self.formatter.interwikilink(0, wikiname, pagename))
 
@@ -200,7 +201,7 @@
         # check for image, and possibly return IMG tag (images are always inlined)
         if not kw.get('pretty_url', 0) and wikiutil.isPicture(fname):
             return self.formatter.attachment_image(fname)
-                
+
         # inline the attachment
         if scheme == 'inline':
             return self.formatter.attachment_inlined(fname, text)
@@ -325,7 +326,7 @@
         # handle anchors
         parts = word.split("#", 1)
         anchor = ""
-        if len(parts)==2:
+        if len(parts) == 2:
             word, anchor = parts
 
         return (self.formatter.pagelink(1, word, anchor=anchor) +
@@ -372,7 +373,7 @@
         """
         word = text[1:-1] # strip brackets
         first_char = word[0]
-        if first_char in "'\"": # this is quoted
+        if first_char in wikiutil.QUOTE_CHARS:
             # split on closing quote
             target, linktext = word[1:].split(first_char, 1)
         else: # not quoted
@@ -388,13 +389,13 @@
     def _url_bracket_repl(self, word):
         """Handle bracketed URLs."""
         word = word[1:-1] # strip brackets
-        
+
         # Local extended link? [:page name:link text] XXX DEPRECATED
         if word[0] == ':':
             words = word[1:].split(':', 1)
             if len(words) == 1:
                 words = words * 2
-            target_and_text = 'wiki:Self:"%s" %s' % tuple(words)
+            target_and_text = 'wiki:Self:%s %s' % (wikiutil.quoteName(words[0]), words[1])
             return self.interwiki(target_and_text, pretty_url=1)
 
         scheme_and_rest = word.split(":", 1)
@@ -414,7 +415,7 @@
                 return self.interwiki(word, pretty_url=1)
             if scheme in self.attachment_schemas:
                 return self.attachment(word, pretty_url=1)
-            
+
             words = word.split(None, 1)
             if len(words) == 1:
                 words = words * 2
@@ -450,7 +451,7 @@
     def _ent_symbolic_repl(self, word):
         """Handle symbolic SGML entities."""
         return self.formatter.rawHTML(word)
-    
+
     def _indent_repl(self, match):
         """Handle pure indentation (no - * 1. markup)."""
         result = []
@@ -510,59 +511,59 @@
 
     def _indent_to(self, new_level, list_type, numtype, numstart):
         """Close and open lists."""
-        open = []   # don't make one out of these two statements!
-        close = []
+        openlist = []   # don't make one out of these two statements!
+        closelist = []
 
         if self._indent_level() != new_level and self.in_table:
-            close.append(self.formatter.table(0))
+            closelist.append(self.formatter.table(0))
             self.in_table = 0
-        
+
         while self._indent_level() > new_level:
-            self._close_item(close)
+            self._close_item(closelist)
             if self.list_types[-1] == 'ol':
                 tag = self.formatter.number_list(0)
             elif self.list_types[-1] == 'dl':
                 tag = self.formatter.definition_list(0)
             else:
                 tag = self.formatter.bullet_list(0)
-            close.append(tag)
+            closelist.append(tag)
 
             del self.list_indents[-1]
             del self.list_types[-1]
-            
+
             if self.list_types: # we are still in a list
                 if self.list_types[-1] == 'dl':
                     self.in_dd = 1
                 else:
                     self.in_li = 1
-                
+
         # Open new list, if necessary
         if self._indent_level() < new_level:
             self.list_indents.append(new_level)
             self.list_types.append(list_type)
 
             if self.formatter.in_p:
-                close.append(self.formatter.paragraph(0))
-            
+                closelist.append(self.formatter.paragraph(0))
+
             if list_type == 'ol':
                 tag = self.formatter.number_list(1, numtype, numstart)
             elif list_type == 'dl':
                 tag = self.formatter.definition_list(1)
             else:
                 tag = self.formatter.bullet_list(1)
-            open.append(tag)
-            
+            openlist.append(tag)
+
             self.first_list_item = 1
             self.in_li = 0
             self.in_dd = 0
-            
+
         # If list level changes, close an open table
-        if self.in_table and (open or close):
-            close[0:0] = [self.formatter.table(0)]
+        if self.in_table and (openlist or closelist):
+            closelist[0:0] = [self.formatter.table(0)]
             self.in_table = 0
-        
+
         self.in_list = self.list_types != []
-        return ''.join(close) + ''.join(open)
+        return ''.join(closelist) + ''.join(openlist)
 
 
     def _undent(self):
@@ -725,9 +726,9 @@
                     attrs['colspan'] = '"%d"' % (word.count("|")/2)
 
             # return the complete cell markup
-            result.append(self.formatter.table_cell(1, attrs) + attrerr)         
+            result.append(self.formatter.table_cell(1, attrs) + attrerr)
             result.append(self._line_anchordef())
-            return ''.join(result) 
+            return ''.join(result)
         else:
             return self.formatter.text(word)
 
@@ -740,7 +741,7 @@
         level = 1
         while h[level:level+1] == '=':
             level += 1
-        depth = min(5,level)
+        depth = min(5, level)
 
         # this is needed for Included pages
         # TODO but it might still result in unpredictable results
@@ -755,10 +756,10 @@
             unique_id = '-%d' % self.titles[pntt]
         result = self._closeP()
         result += self.formatter.heading(1, depth, id="head-"+sha.new(pntt.encode(config.charset)).hexdigest()+unique_id)
-                                     
+
         return (result + self.formatter.text(title_text) +
                 self.formatter.heading(0, depth))
-    
+
     def _parser_repl(self, word):
         """Handle parsed code displays."""
         if word.startswith('{{{'):
@@ -823,7 +824,7 @@
         if self.formatter.in_p:
             return self.formatter.paragraph(0)
         return ''
-        
+
     def _macro_repl(self, word):
         """Handle macros ([[macroname]])."""
         macro_name = word[2:-2]
@@ -849,26 +850,26 @@
         lastpos = 0
 
         ###result.append(u'<span class="info">[scan: <tt>"%s"</tt>]</span>' % line)
-      
+
         for match in scan_re.finditer(line):
             # Add text before the match
             if lastpos < match.start():
-                
+
                 ###result.append(u'<span class="info">[add text before match: <tt>"%s"</tt>]</span>' % line[lastpos:match.start()])
-                
+
                 if not (self.inhibit_p or self.in_pre or self.formatter.in_p):
                     result.append(self.formatter.paragraph(1, css_class="line862"))
                 result.append(self.formatter.text(line[lastpos:match.start()]))
-            
+
             # Replace match with markup
             if not (self.inhibit_p or self.in_pre or self.formatter.in_p or
                     self.in_table or self.in_list):
                 result.append(self.formatter.paragraph(1, css_class="line867"))
             result.append(self.replace(match))
             lastpos = match.end()
-        
+
         ###result.append('<span class="info">[no match, add rest: <tt>"%s"<tt>]</span>' % line[lastpos:])
-        
+
         # Add paragraph with the remainder of the line
         if not (self.in_pre or self.in_li or self.in_dd or self.inhibit_p or
                 self.formatter.in_p) and lastpos < len(line):
@@ -881,17 +882,17 @@
         result = []
         for type, hit in match.groupdict().items():
             if hit is not None and not type in ["hmarker", ]:
-                
+
                 ###result.append(u'<span class="info">[replace: %s: "%s"]</span>' % (type, hit))
                 if self.in_pre and type not in ['pre', 'ent']:
-                    return self.formatter.text(hit) 
+                    return self.formatter.text(hit)
                 else:
                     # Open p for certain types
                     if not (self.inhibit_p or self.formatter.in_p
                             or self.in_pre or (type in self.no_new_p_before)):
                         result.append(self.formatter.paragraph(1, css_class="line891"))
-                    
-                    # Get replace method and replece hit
+
+                    # Get replace method and replace hit
                     replace = getattr(self, '_' + type + '_repl')
                     result.append(replace(hit))
                     return ''.join(result)
@@ -925,13 +926,13 @@
                 'word_rule': self.word_rule,
                 'rules': rules,
             }
-        self.request.clock.start('compile_huge_and_ugly')        
+        self.request.clock.start('compile_huge_and_ugly')
         scan_re = re.compile(rules, re.UNICODE)
         number_re = re.compile(self.ol_rule, re.UNICODE)
         term_re = re.compile(self.dl_rule, re.UNICODE)
-        indent_re = re.compile("^\s*", re.UNICODE)
+        indent_re = re.compile(ur"^\s*", re.UNICODE)
         eol_re = re.compile(r'\r?\n', re.UNICODE)
-        self.request.clock.stop('compile_huge_and_ugly')        
+        self.request.clock.stop('compile_huge_and_ugly')
 
         # get text and replace TABs
         rawtext = self.raw.expandtabs()
@@ -996,7 +997,7 @@
                         continue
                     if line[:endpos]:
                         self.parser_lines.append(line[:endpos])
-                    
+
                     # Close p before calling parser
                     # TODO: do we really need this?
                     self.request.write(self._closeP())
@@ -1014,7 +1015,7 @@
                 # we don't have \n as whitespace any more
                 # This is the space between lines we join to one paragraph
                 line += ' '
-                
+
                 # Paragraph break on empty lines
                 if not line.strip():
                     if self.in_table:
@@ -1064,7 +1065,7 @@
                         ## CHANGE: no automatic p on li
                         ##self.request.write(self.formatter.paragraph(1))
                         self.in_li = 1
-                        
+
                     # CHANGE: removed check for self.in_li
                     # paragraph should end before table, always!
                     if self.formatter.in_p:
@@ -1078,12 +1079,12 @@
                        line[indlen:indlen + 2] == "||" and
                        line.endswith("|| ") and
                        len(line) >= 5 + indlen)):
-                    
+
                     # Close table
                     self.request.write(self.formatter.table(0))
                     self.request.write(self._line_anchordef())
                     self.in_table = 0
-                                            
+
             # Scan line, format and write
             formatted_line = self.scan(scan_re, line)
             self.request.write(formatted_line)
@@ -1098,7 +1099,7 @@
         if self.in_table: self.request.write(self.formatter.table(0))
 
     # Private helpers ------------------------------------------------------------
-    
+
     def setParser(self, name):
         """ Set parser to parser named 'name' """
         mt = wikiutil.MimeType(name)
--- a/MoinMoin/request/__init__.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/request/__init__.py	Mon Aug 21 02:31:07 2006 +0200
@@ -1060,6 +1060,18 @@
 
             # The last component in path_info is the page name, if any
             path = self.getPathinfo()
+
+            # we can have all action URLs like this: /action/ActionName/PageName?action=ActionName&...
+            # this just makes it possible for robots.txt to forbid them for crawlers

+            prefix = self.cfg.url_prefix_action
+            if prefix is not None:
+                prefix = '/%s/' % prefix # e.g. '/action/'
+                if path.startswith(prefix):
+                    # remove prefix and action name
+                    path = path[len(prefix):]
+                    action, path = path.split('/', 1)
+                    path = '/' + path
+
             if path.startswith('/'):
                 pagename = self.normalizePagename(path)
             else:
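
To make the /action/ URL handling above concrete, here is a minimal standalone sketch of the same mapping; the helper name strip_action_prefix is hypothetical and only mirrors the code added in the hunk above.

    def strip_action_prefix(path, url_prefix_action='action'):
        # hypothetical helper mirroring the request handling above:
        # '/action/info/SomePage' -> ('info', '/SomePage')
        prefix = '/%s/' % url_prefix_action          # e.g. '/action/'
        if path.startswith(prefix):
            action, rest = path[len(prefix):].split('/', 1)
            return action, '/' + rest
        return None, path

    # strip_action_prefix('/action/info/FrontPage') == ('info', '/FrontPage')
    # strip_action_prefix('/FrontPage')             == (None, '/FrontPage')
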
@@ -1094,10 +1106,10 @@
                         wikitag, wikiurl, wikitail, error = wikiutil.resolve_wiki(self, pagetrail[-1])
                         url = wikiurl + wikiutil.quoteWikinameURL(wikitail)
                     else:
-                        url = Page(self, pagetrail[-1]).url(self)
+                        url = Page(self, pagetrail[-1]).url(self, escape=0)
                 else:
                     # Or to localized FrontPage
-                    url = wikiutil.getFrontPage(self).url(self)
+                    url = wikiutil.getFrontPage(self).url(self, escape=0)
                 self.http_redirect(url)
                 return self.finish()
 
--- a/MoinMoin/search/Xapian.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/search/Xapian.py	Mon Aug 21 02:31:07 2006 +0200
@@ -152,7 +152,7 @@
         # http://svn.xapian.org/*checkout*/trunk/xapian-applications/omega/docs/termprefixes.txt
         'author': 'A',
         'date':   'D', # numeric format: YYYYMMDD or "latest" - e.g. D20050224 or Dlatest
-                       #G   newsGroup (or sim2006-08-17 05:11:53ilar entity - e.g. a web forum name)
+                       #G   newsGroup (or similar entity - e.g. a web forum name)
         'hostname': 'H',
         'keyword': 'K',
         'lang': 'L',   # ISO Language code
--- a/MoinMoin/support/htmlmarkup.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/support/htmlmarkup.py	Mon Aug 21 02:31:07 2006 +0200
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
-# copied from trac.util.markup, revision 3446, merged on 2006-06-30
+# copied from trac.util.html, revision 3609, merged on 2006-08-20
 #
 # Copyright (C) 2003-2006 Edgewall Software
+# Copyright 2006 MoinMoin:AlexanderSchremmer
 # All rights reserved.
 #
 # This software is licensed as described in the file COPYING, which
@@ -318,8 +319,11 @@
             else:
                 yield escape(child, quotes=False)
 
+    def __unicode__(self):
+        return u''.join(self.serialize())
+
     def __str__(self):
-        return Markup(''.join(self.serialize()))
+        return ''.join(self.serialize())
 
     def __add__(self, other):
         return Fragment()(self, other)
--- a/MoinMoin/theme/__init__.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/theme/__init__.py	Mon Aug 21 02:31:07 2006 +0200
@@ -183,9 +183,8 @@
         """
         html = u''
         if self.cfg.logo_string:
-            pagename = wikiutil.getFrontPage(self.request).page_name
-            pagename = wikiutil.quoteWikinameURL(pagename)
-            logo = wikiutil.link_tag(self.request, pagename, self.cfg.logo_string)
+            page = wikiutil.getFrontPage(self.request)
+            logo = page.link_to_raw(self.request, self.cfg.logo_string)
             html = u'''<div id="logo">%s</div>''' % logo
         return html
 
@@ -214,19 +213,23 @@
         """
         _ = self.request.getText
         content = []
-        if d['title_link']: # having a link means we have a (linked) pagename ONLY as title, not a message title
-                            # XXX this method is rather ugly and should be improved
+        if d['title_text'] == d['page_name']: # just showing a page, no action
             curpage = ''
             segments = d['page_name'].split('/') # was: title_text
             for s in segments[:-1]:
                 curpage += s
                 content.append("<li>%s</li>" % Page(self.request, curpage).link_to(self.request, s))
                 curpage += '/'
-            content.append(('<li><a class="backlink" title="%(title)s" rel="nofollow" href="%(href)s">%(text)s</a></li>') % {
-                'title': _('Click to do a full-text search for this title'),
-                'href': d['title_link'],
-                'text': wikiutil.escape(segments[-1]),
-                })
+            link_text = segments[-1]
+            link_title = _('Click to do a full-text search for this title')
+            link_query = {
+                'action': 'fullsearch',
+                'value': 'linkto:"%s"' % d['page_name'],
+                'context': '180',
+            }
+            # we don't use d['title_link'] any more, but build the link ourselves:
+            link = d['page'].link_to(self.request, link_text, querystr=link_query, title=link_title, css_class='backlink', rel='nofollow')
+            content.append(('<li>%s</li>') % link)
         else:
             content.append('<li>%s</li>' % wikiutil.escape(d['title_text']))
 
@@ -524,9 +527,7 @@
 
         if isinstance(msg, (str, unicode)):
             # Render simple strings with a close link
-            close = d['page'].link_to(self.request,
-                                      text=_('Clear message'),
-                                      querystr={'action': 'show'})
+            close = d['page'].link_to(self.request, text=_('Clear message'))
             html = u'<p>%s</p>\n<div class="buttons">%s</div>\n' % (msg, close)
         else:
             # msg is a widget
@@ -1160,10 +1161,10 @@
         _ = self.request.getText
         return """\
 <script type="text/javascript">
-var gui_editor_link_href = "%(url)s?action=edit&editor=gui";
+var gui_editor_link_href = "%(url)s";
 var gui_editor_link_text = "%(text)s";
 </script>        
-""" % {'url': page.url(self.request),
+""" % {'url': page.url(self.request, querystr={'action': 'edit', 'editor': 'gui', }, escape=0),
        'text': _('Edit (GUI)', formatted=False),
       }
 
@@ -1424,7 +1425,6 @@
         current page being rendered.
         
         @param text: the title text
-        @keyword link: URL for the title
         @keyword msg: additional message (after saving)
         @keyword pagename: 'PageName'
         @keyword page: the page instance that called us.
@@ -1599,7 +1599,7 @@
 
         # If in print mode, start page div and emit the title
         if keywords.get('print_mode', 0):
-            d = {'title_text': text, 'title_link': None, 'page': page, }
+            d = {'title_text': text, 'page': page, }
             request.themedict = d
             output.append(self.startPage())
             output.append(self.interwiki(d))
@@ -1612,7 +1612,6 @@
                 'theme': self.name,
                 'script_name': scriptname,
                 'title_text': text,
-                'title_link': keywords.get('link', ''),
                 'logo_string': request.cfg.logo_string,
                 'site_name': request.cfg.sitename,
                 'page': page,
--- a/MoinMoin/wikisync.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/wikisync.py	Mon Aug 21 02:31:07 2006 +0200
@@ -155,10 +155,6 @@
         """ Returns a list of SyncPage instances. """
         return NotImplemented
 
-    def delete_page(self, pagename):
-        """ Deletes the page called pagename. """
-        return NotImplemented
-
 
 class MoinRemoteWiki(RemoteWiki):
     """ Used for MoinMoin wikis reachable via XMLRPC. """
@@ -218,10 +214,17 @@
         result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
         return result
 
-    def delete_page(self, pagename):
-        return # XXX not implemented yet
+    def delete_page(self, pagename, last_remote_rev, interwiki_name):
+        try:
+            result = self.connection.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None)
+        except xmlrpclib.Fault, e:
+            if e.faultCode == "NOT_ALLOWED":
+                return e.faultString
+            raise
+        return ""
 
     # Methods implementing the RemoteWiki interface
+
     def get_interwiki_name(self):
         return self.remote_interwikiname
 
@@ -281,7 +284,7 @@
     # Public methods:
 
     # Methods implementing the RemoteWiki interface
-    def delete_page(self, page_name, comment):
+    def delete_page(self, pagename, comment):
-        page = PageEditor(self.request, page_name)
+        page = PageEditor(self.request, pagename)
         try:
             page.deletePage(comment)
@@ -318,15 +321,15 @@
         return "<MoinLocalWiki>"
 
 
-# ------------------ Tags ------------------ 
+# ------------------ Tags ------------------
 
 
 class Tag(object):
     """ This class is used to store information about merging state. """
-    
+
     def __init__(self, remote_wiki, remote_rev, current_rev, direction, normalised_name):
         """ Creates a new Tag.
-        
+
         @param remote_wiki: The identifier of the remote wiki.
         @param remote_rev: The revision number on the remote end.
         @param current_rev: The related local revision.
@@ -390,7 +393,7 @@
 
     def __init__(self, page):
         """ Creates a new TagStore that uses pickle files.
-        
+
         @param page: a Page object where the tags should be related to
         """
 
@@ -416,7 +419,7 @@
             self.tags = []
         else:
             datafile.close()
-    
+
     def commit(self):
         """ Writes the memory contents to the data file. """
         datafile = file(self.filename, "wb")
@@ -468,3 +471,4 @@
 # currently we just have one implementation, so we do not need
 # a factory method
 TagStore = PickleTagStore
+
--- a/MoinMoin/wikiutil.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/wikiutil.py	Mon Aug 21 02:31:07 2006 +0200
@@ -191,7 +191,7 @@
     """
     if qstr is None:
         qstr = {}
-    if isinstance(qstr, type({})):
+    if isinstance(qstr, dict):
         qstr.update(kw)
         items = ['%s=%s' % (url_quote_plus(key, want_unicode=want_unicode), url_quote_plus(value, want_unicode=want_unicode)) for key, value in qstr.items()]
         qstr = '&'.join(items)
@@ -490,6 +490,26 @@
             self.wlock.release()
 
 
+# Quoting of wiki names, file names, etc. (in the wiki markup) -----------------------------------
+
+QUOTE_CHARS = u"'\""
+
+def quoteName(name):
+    """ put quotes around a given name """
+    for quote_char in QUOTE_CHARS:
+        if quote_char not in name:
+            return u"%s%s%s" % (quote_char, name, quote_char)
+    else:
+        return name # XXX we need to be able to escape the quote char for worst case
+
+def unquoteName(name):
+    """ if there are quotes around the name, strip them """
+    for quote_char in QUOTE_CHARS:
+        if name and quote_char == name[0] == name[-1]:
+            return name[1:-1]
+    else:
+        return name
+
 #############################################################################
 ### InterWiki
 #############################################################################
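
A short usage sketch for the quoting helpers added above (illustrative values only, assuming the behaviour shown in the hunk):

    from MoinMoin import wikiutil

    name = u'Some "quoted" PageName'
    quoted = wikiutil.quoteName(name)      # -> u'\'Some "quoted" PageName\''
    assert wikiutil.unquoteName(quoted) == name
    # if a name already contains both quote chars, quoteName returns it
    # unchanged (see the XXX note above about escaping)
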
@@ -594,7 +614,7 @@
         except ValueError:
             wikiname, rest = 'Self', wikiurl
     first_char = rest[0]
-    if first_char in "'\"": # quoted pagename
+    if first_char in QUOTE_CHARS: # quoted pagename
         pagename_linktext = rest[1:].split(first_char, 1)
     else: # not quoted, split on whitespace
         pagename_linktext = rest.split(None, 1)
@@ -822,7 +842,7 @@
     if re.match(Parser.word_rule + "$", pagename):
         return pagename
     else:
-        return u'["%s"]' % pagename
+        return u'["%s"]' % pagename # XXX use quoteName(pagename) later
 
 #############################################################################
 ### mimetype support
@@ -860,7 +880,6 @@
     MIMETYPES_spoil_mapping[value] = key
 
 
-# mimetype stuff ------------------------------------------------------------
 class MimeType(object):
     """ represents a mimetype like text/plain """
 
@@ -1458,6 +1477,8 @@
     @rtype: string
     @return: formatted link tag
     """
+    if formatter is None:
+        formatter = request.html_formatter
     if kw.has_key('css_class'):
         css_class = kw['css_class']
         del kw['css_class'] # one time is enough
@@ -1469,26 +1490,29 @@
         text = params # default
     if formatter:
         url = "%s/%s" % (request.getScriptname(), params)
+        # formatter.url will escape the url part
         if on is not None:
-            return formatter.url(on, url, css_class, **kw)
-        return (formatter.url(1, url, css_class, **kw) +
+            tag = formatter.url(on, url, css_class, **kw)
+        else:
+            tag = (formatter.url(1, url, css_class, **kw) +
                 formatter.rawHTML(text) +
                 formatter.url(0))
-    if on is not None and not on:
-        return '</a>'
-
-    attrs = ''
-    if css_class:
-        attrs += ' class="%s"' % css_class
-    if id:
-        attrs += ' id="%s"' % id
-    if name:
-        attrs += ' name="%s"' % name
-    result = '<a%s href="%s/%s">' % (attrs, request.getScriptname(), params)
-    if on:
-        return result
-    else:
-        return "%s%s</a>" % (result, text)
+    else: # this shouldn't be used any more:
+        if on is not None and not on:
+            tag = '</a>'
+        else:
+            attrs = ''
+            if css_class:
+                attrs += ' class="%s"' % css_class
+            if id:
+                attrs += ' id="%s"' % id
+            if name:
+                attrs += ' name="%s"' % name
+            tag = '<a%s href="%s/%s">' % (attrs, request.getScriptname(), params) # XXX wikiutil.escape(params) !?
+            if not on:
+                tag = "%s%s</a>" % (tag, text)
+        request.log("Warning: wikiutil.link_tag called without formatter and without request.html_formatter. tag=%r" % (tag, ))
+    return tag
 
 def containsConflictMarker(text):
     """ Returns true if there is a conflict marker in the text. """
--- a/MoinMoin/xmlrpc/__init__.py	Mon Aug 21 02:30:05 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Mon Aug 21 02:31:07 2006 +0200
@@ -5,7 +5,7 @@
     If you want to use wikirpc function "putPage", read the comments in
     xmlrpc_putPage or it won't work!
     
-    Parts of this code are based on Jrgen Hermann's wikirpc.py,
+    Parts of this code are based on Jürgen Hermann's wikirpc.py,
     Les Orchard's "xmlrpc.cgi" and further work by Gustavo Niemeyer.
 
     See http://www.ecyrd.com/JSPWiki/Wiki.jsp?page=WikiRPCInterface
@@ -130,7 +130,7 @@
             else:
                 # wrap response in a singleton tuple
                 response = (response,)
-    
+
                 # serialize it
                 response = xmlrpclib.dumps(response, methodresponse=1)
 
@@ -182,7 +182,7 @@
         request.
 
         See http://www.xmlrpc.com/discuss/msgReader$1208
-        
+
         Copied from SimpleXMLRPCServer.py
         """
 
@@ -275,7 +275,7 @@
         pagelist = self.request.rootpage.getPageList(filter=p_filter, exists=not options["include_deleted"],
                                                      include_underlay=options["include_underlay"],
                                                      return_objects=options["include_revno"])
-        
+
         if options['include_revno']:
             pages = []
             for x in pagelist:
@@ -289,7 +289,7 @@
 
     def xmlrpc_getRecentChanges(self, date):
         """ Get RecentChanges since date
-        
+
         @param date: date since when rc will be listed
         @rtype: list
         @return: a list of changed pages since date, which should be in
@@ -695,14 +695,22 @@
     def xmlrpc_mergeDiff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, normalised_name):
         """ Merges a diff sent by the remote machine and returns the number of the new revision.
             Additionally, this method tags the new revision.
-            
+
             @param pagename: The pagename that is currently dealt with.
             @param diff: The diff that can be applied to the version specified by delta_remote_rev.
+                If it is None, the page is deleted.
             @param local_rev: The revno of the page on the other wiki system, used for the tag.
             @param delta_remote_rev: The revno that the diff is taken against.
             @param last_remote_rev: The last revno of the page `pagename` that is known by the other wiki site.
             @param interwiki_name: Used to build the interwiki tag.
             @param normalised_name: The normalised pagename that is common to both wikis.
+
+            @return: Returns the current revision number after the merge was done, or one of the following status codes:
+                * "SUCCESS" - the page could be merged and tagged successfully.
+                * "NOT_EXIST" - the item does not exist and no content was supplied.
+                * "LASTREV_INVALID" - the page was changed and the revision became invalid.
+                * "INTERNAL_ERROR" - there was an internal error.
+                * "NOT_ALLOWED" - you are not allowed to do the merge operation on the page.
         """
         from MoinMoin.util.bdiff import decompress, patch
         from MoinMoin.wikisync import TagStore, BOTH
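
As a hedged illustration of the deletion protocol described in the docstring above: a remote delete is requested by sending diff=None, and a refusal comes back as an xmlrpclib.Fault carrying one of the listed codes. A minimal client-side sketch follows; the URL, revision number and interwiki name are placeholders, and real sync code (see MoinMoin/wikisync.py above) also handles authentication.

    import xmlrpclib

    wiki_url = "http://example.com/wiki?action=xmlrpc2"   # placeholder
    last_remote_rev = 42                                  # placeholder
    interwiki_name = u"ExampleWiki"                       # placeholder

    server = xmlrpclib.ServerProxy(wiki_url)
    try:
        # diff=None asks the remote side to delete the page
        new_rev = server.mergeDiff(u"SomePage", None, None, None,
                                   last_remote_rev, interwiki_name, None)
    except xmlrpclib.Fault, e:
        if e.faultCode == "NOT_ALLOWED":
            print "delete denied:", e.faultString
        else:
            raise
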
@@ -712,7 +720,7 @@
         pagename = self._instr(pagename)
 
         comment = u"Remote Merge - %r" % unpackLine(interwiki_name)[-1]
-        
+
         # User may read page?
         if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename):
             return self.notAllowedFault()
@@ -728,6 +736,13 @@
         if not currentpage.exists() and diff is None:
             return xmlrpclib.Fault("NOT_EXIST", "The page does not exist and no diff was supplied.")
 
+        if diff is None: # delete the page
+            try:
+                currentpage.deletePage(comment)
+            except PageEditor.AccessDenied, (msg, ):
+                return xmlrpclib.Fault("NOT_ALLOWED", msg)
+            return currentpage.get_real_rev()
+
         # base revision used for the diff
         basepage = Page(self.request, pagename, rev=(delta_remote_rev or 0))
 
@@ -744,7 +759,7 @@
             return LASTREV_INVALID
 
         current_rev = currentpage.get_real_rev()
-        
+
         tags = TagStore(currentpage)
         tags.add(remote_wiki=interwiki_name, remote_rev=local_rev, current_rev=current_rev, direction=BOTH, normalised_name=normalised_name)
 
--- a/docs/CHANGES	Mon Aug 21 02:30:05 2006 +0200
+++ b/docs/CHANGES	Mon Aug 21 02:31:07 2006 +0200
@@ -138,6 +138,9 @@
       anyway), just use emit_http_headers and include a Status: XXX header.
       Method will vanish with moin 1.7. 
     * cfg.url_prefix is DEPRECATED, please use cfg.url_prefix_static.
+    * d['title_link'] is not supported any more. You can easily make that link
+      on your own in your theme, see example in MoinMoin/theme/__init__.py,
+      function "title".
 
   New Features:
     * Removed "underscore in URL" == "blank in pagename magic" - it made more
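
For theme authors affected by the d['title_link'] removal noted above, here is a minimal sketch of building the backlink yourself; it mirrors the "title" function in the MoinMoin/theme/__init__.py hunk earlier in this changeset (d and _ are the usual theme dict and translation helper).

    # inside your theme's title() method (sketch)
    link_query = {
        'action': 'fullsearch',
        'value': 'linkto:"%s"' % d['page_name'],
        'context': '180',
    }
    link = d['page'].link_to(self.request, d['page_name'].split('/')[-1],
                             querystr=link_query,
                             title=_('Click to do a full-text search for this title'),
                             css_class='backlink', rel='nofollow')
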
@@ -225,11 +228,33 @@
       1. The setting is now called url_prefix_static (to make it more clear
          that we mean the static stuff, not the wiki script url).
       2. The strongly recommended (and default) value of it is '/moin_static160'
-         for moin version 1.6.0 (and will be ...161 for moin 1.6.1). We use a
-         very long cache lifetime for static stuff now, so it is required to
-         change the URL of static stuff when the static stuff changes (e.g. on
-         a version upgrade of moin) to avoid problems with stale cache content.
+         for moin version 1.6.0 (and will be ...161 for moin 1.6.1). It is
+         possible and recommended to use a very long cache lifetime for static
+         content now (Expires: access plus 1 year), because the URL of the
+         static content must change whenever that content changes (e.g. on a
+         version upgrade of moin) to avoid problems with stale cache content.
          Your moin will be faster with lower load and traffic because of this.
+         For the standalone server, we use a 1 year expiry for static content now.
+         For Apache, Lighttpd and other "external" servers, you have to configure
+         a long expiry yourself and change the url_prefix_static related
+         configuration on upgrade.
+    * url_prefix_action ['action'] was introduced for lowering load and traffic
+      caused by search engine crawlers. Up to now, crawlers were causing a high
+      load on internet moin wikis because they tried to fetch about everything,
+      including all actions linked from the user interface.
+      Known crawlers only get 403 for most actions, but they tried nevertheless.
+      There was no means of keeping them away from actions due to the rather
+      braindead robots.txt standard: you can only disallow paths there, but
+      moin's actions were querystring based, not path based (this would need
+      regex support in robots.txt, but there is no such thing).
+      This has changed now. Moin will now generate action URLs you can handle in
+      robots.txt, like /action/info/PageName?action=info. So if you don't want
+      bots triggering actions, just disallow /action/ there. Keep in mind that
+      attachments are handled by /action/AttachFile, so if you want attached
+      files and pictures indexed by search engines, don't disallow
+      /action/AttachFile/ in your robots.txt.
+    * We don't use ...?action=show any more for the "Clear message" links shown
+      in the message boxes, but directly link to the page.
 
 Version 1.5-current:
    * moin.fcg improved - if you use FastCGI, you must use the new file:
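
A minimal configuration sketch for the url_prefix_action option described above, assuming a moin 1.5/1.6 style wikiconfig.py (the sitename is a placeholder; 'action' is the documented default, so setting it explicitly is optional):

    # wikiconfig.py (sketch)
    from MoinMoin.multiconfig import DefaultConfig

    class Config(DefaultConfig):
        sitename = u'Example Wiki'                # placeholder
        # generate action links as /action/ActionName/PageName?action=...
        # so that robots.txt can simply 'Disallow: /action/'
        url_prefix_action = 'action'
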
--- a/docs/CHANGES.aschremmer	Mon Aug 21 02:30:05 2006 +0200
+++ b/docs/CHANGES.aschremmer	Mon Aug 21 02:31:07 2006 +0200
@@ -4,25 +4,19 @@
   Known main issues:
     * Do I need to tag delete operations?
     * How to handle renames?
-    * How will we store tags? (Metadata support would be handy)
+    * How should we store tags? (Metadata support would be handy)
       (currently done in Pickle files)
 
   ToDo:
-    * Delete remote pages.
+    * Implement rollback
     * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
     * Check what needs to be documented on MoinMaster.
-
-    * Show tags in an action=info view?
-
     * Test with prefixes
     * Search for XXX
-    * Delete iters?
-    * Maybe refactor YYY into MoinLocalWiki
-    * Remove amount of "very" in the code
-    * Clean up trailing whitespace.
-
     * Implement a cross-site authentication system, i.e. mainly an
       identity storage. (does OpenID make sense?)
+
+    * Maybe refactor YYY into MoinLocalWiki
     * Add page locking, i.e. use the one in the new storage layer.
     * Do older tags of one wiki site have to be stored as well? Why don't we
       keep just one tag?
@@ -47,6 +41,7 @@
     * XMLRPC functions may return Fault instances
     * diff3 algorithm extended, a new mode should reduce the conflicts
     * GetText2 macro
+    * showtags action
 
   Bugfixes (only stuff that is buggy in moin/1.6 main branch):
     * Conflict resolution fixes. (merged into main)
--- a/wiki/htdocs/robots.txt	Mon Aug 21 02:30:05 2006 +0200
+++ b/wiki/htdocs/robots.txt	Mon Aug 21 02:31:07 2006 +0200
@@ -2,5 +2,8 @@
 
 User-agent: *
 Crawl-delay: 20
-Disallow:
+# This has to match the script url + cfg.url_prefix_action - it
+# saves lots of search engine load and traffic by keeping crawlers
+# from requesting action related URLs:
+Disallow: /action/