changeset 1454:4f355732e01f

merge with main
author Franz Pletz <fpletz AT franz-pletz DOT org>
date Tue, 22 Aug 2006 20:13:48 +0200
parents 5b60d84fddb7 (current diff) c65bff91b9e6 (diff)
children d20d04b46ed4
files MoinMoin/config/multiconfig.py
diffstat 20 files changed, 307 insertions(+), 194 deletions(-) [+]
line wrap: on
line diff
--- a/MoinMoin/action/SyncPages.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/action/SyncPages.py	Tue Aug 22 20:13:48 2006 +0200
@@ -24,13 +24,13 @@
 from MoinMoin.PageEditor import PageEditor, conflict_markers
 from MoinMoin.Page import Page
 from MoinMoin.wikidicts import Dict, Group
-from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage
+from MoinMoin.wikisync import TagStore, UnsupportedWikiException, SyncPage, NotAllowedException
 from MoinMoin.wikisync import MoinLocalWiki, MoinRemoteWiki, UP, DOWN, BOTH, MIMETYPE_MOIN
 from MoinMoin.util.bdiff import decompress, patch, compress, textdiff
 from MoinMoin.util import diff3
 
 
-debug = True
+debug = False
 
 
 # map sync directions
@@ -59,10 +59,17 @@
         table = []
 
         for line in self.status:
-            macro_args = [line[1]] + list(line[2])
-            table.append(table_line % {"smiley": line[0][1], "message":
-                line[1] and (u"[[GetText2(|%s)]]" % (packLine(macro_args), )),
-                "raw_suffix": line[3]})
+            if line[1]:
+                if line[2]:
+                    macro_args = [line[1]] + list(line[2])
+                    message = u"[[GetText2(|%s)]]" % (packLine(macro_args), )
+                else:
+                    message = u"[[GetText(%s)]]" % (line[1], )
+            else:
+                message = u""
+            table.append(table_line % {"smiley": line[0][1],
+                                       "message": message,
+                                       "raw_suffix": line[3]})
 
         return "\n".join(table)
 
@@ -76,6 +83,8 @@
             "pageList": None,
             "groupList": None,
             "direction": "foo", # is defaulted below
+            "user": None,     # this should be refactored into a password agent
+            "password": None, # or OpenID like solution (XXX)
         }
 
         options.update(Dict(self.request, self.pagename).get_dict())
@@ -117,7 +126,6 @@
 
         params = self.fix_params(self.parse_page())
 
-        # XXX aquire readlock on self.page
         try:
             if params["direction"] == UP:
                 raise ActionStatus(_("The only supported directions are BOTH and DOWN."))
@@ -130,21 +138,27 @@
 
             local = MoinLocalWiki(self.request, params["localPrefix"], params["pageList"])
             try:
-                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], verbose=debug)
-            except UnsupportedWikiException, (msg, ):
+                remote = MoinRemoteWiki(self.request, params["remoteWiki"], params["remotePrefix"], params["pageList"], params["user"], params["password"], verbose=debug)
+            except (UnsupportedWikiException, NotAllowedException), (msg, ):
                 raise ActionStatus(msg)
 
             if not remote.valid:
                 raise ActionStatus(_("The ''remoteWiki'' is unknown."))
-
-            self.sync(params, local, remote)
         except ActionStatus, e:
             msg = u'<p class="error">%s</p>\n' % (e.args[0], )
-        else:
-            msg = u"%s" % (_("Syncronisation finished. Look below for the status messages."), )
 
-        self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0)
-        # XXX release readlock on self.page
+        try:
+            try:
+                self.sync(params, local, remote)
+            except Exception, e:
+                self.log_status(self.ERROR, _("A severe error occurred:"), raw_suffix=repr(e))
+                raise
+            else:
+                msg = u"%s" % (_("Synchronisation finished. Look below for the status messages."), )
+        finally:
+            # XXX acquire readlock on self.page
+            self.page.saveText(self.page.get_raw_body() + "\n\n" + self.generate_log_table(), 0)
+            # XXX release readlock on self.page
 
         self.page.send_page(self.request, msg=msg)
 
@@ -307,7 +321,7 @@
                 return
 
             if remote_rev is None and direction == BOTH:
-                self.log_status(ActionClass.INFO, _("This is the first synchronisation between this page and the remote wiki."))
+                self.log_status(ActionClass.INFO, _("This is the first synchronisation between the local and the remote wiki for the page %s."), (sp.name, ))
 
             if sp.remote_deleted:
                 remote_contents = ""
@@ -319,9 +333,9 @@
             if sp.local_mime_type == MIMETYPE_MOIN:
                 remote_contents_unicode = remote_contents.decode("utf-8")
                 # here, the actual 3-way merge happens
+                merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 1, *conflict_markers) # YYY direct access
                 if debug:
-                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body()))
-                merged_text = diff3.text_merge(old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), 2, *conflict_markers) # YYY direct access
+                    self.log_status(ActionClass.INFO, raw_suffix="Merging %r, %r and %r into %r" % (old_contents.decode("utf-8"), remote_contents_unicode, current_page.get_raw_body(), merged_text))
                 merged_text_raw = merged_text.encode("utf-8")
             else:
                 if diff is None:
@@ -335,28 +349,44 @@
 
             # XXX upgrade to write lock
             try:
-                current_page.saveText(merged_text, sp.local_rev, comment=comment) # YYY direct access
+                local_change_done = True
+                current_page.saveText(merged_text, sp.local_rev or 0, comment=comment) # YYY direct access
             except PageEditor.Unchanged:
-                pass
+                local_change_done = False
             except PageEditor.EditConflict:
+                local_change_done = False
                 assert False, "You stumbled on a problem with the current storage system - I cannot lock pages"
 
             new_local_rev = current_page.get_real_rev() # YYY direct access
 
-            if direction == BOTH:
-                try:
-                    very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name)
-                except Exception, e:
-                    raise # XXX rollback locally and do not tag locally
-            else:
-                very_current_remote_rev = current_remote_rev
+            def rollback_local_change(): # YYY direct local access
+                rev = new_local_rev - 1
+                revstr = '%08d' % rev
+                oldpg = Page(self.request, sp.local_name, rev=rev)
+                pg = PageEditor(self.request, sp.local_name)
+                savemsg = pg.saveText(oldpg.get_raw_body(), 0, comment=u"Wikisync rollback", extra=revstr, action="SAVE/REVERT")
+
+            try:
+                if direction == BOTH:
+                    try:
+                        very_current_remote_rev = remote.merge_diff(sp.remote_name, compress(diff), new_local_rev, current_remote_rev, current_remote_rev, local_full_iwid, sp.name)
+                    except NotAllowedException:
+                        self.log_status(ActionClass.ERROR, _("The page %s could not be merged because you are not allowed to modify the page in the remote wiki."), (sp.name, ))
+                        return
+                else:
+                    very_current_remote_rev = current_remote_rev
+
+                local_change_done = False # changes are committed remotely, all is fine
+            finally:
+                if local_change_done:
+                    rollback_local_change()
 
             tags.add(remote_wiki=remote_full_iwid, remote_rev=very_current_remote_rev, current_rev=new_local_rev, direction=direction, normalised_name=sp.name)
 
             if sp.local_mime_type != MIMETYPE_MOIN or not wikiutil.containsConflictMarker(merged_text):
-                self.log_status(ActionClass.INFO, _("Page successfully merged."))
+                self.log_status(ActionClass.INFO, _("Page %s successfully merged."), (sp.name, ))
             else:
-                self.log_status(ActionClass.WARN, _("Page merged with conflicts."))
+                self.log_status(ActionClass.WARN, _("Page %s merged with conflicts."), (sp.name, ))
 
             # XXX release lock
 
--- a/MoinMoin/action/__init__.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/action/__init__.py	Tue Aug 22 20:13:48 2006 +0200
@@ -91,8 +91,9 @@
             for convenience we give him some pre-assembled html for the buttons.
         """
         _ = self._
+        f = self.request.formatter
         prompt = _("Execute action %(actionname)s?") % {'actionname': self.actionname}
-        return "<p>%s</p>%s" % (prompt, buttons_html)
+        return f.paragraph(1) + f.text(prompt) + f.paragraph(0) + f.rawHTML(buttons_html)
 
     def make_buttons(self):
         """ return a list of form buttons for the action form """
--- a/MoinMoin/action/diff.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/action/diff.py	Tue Aug 22 20:13:48 2006 +0200
@@ -117,49 +117,48 @@
             oldrev2 = 0 # XXX
 
     edit_count = abs(oldcount1 - oldcount2)
-
-    # this should use the formatter, but there is none?
-    request.write('<div id="content">\n') # start content div
-    request.write('<p class="diff-header">')
-    request.write(_('Differences between revisions %d and %d') % (oldpage.get_real_rev(), newpage.get_real_rev()))
+    f = request.formatter
+    request.write(f.div(1, id="content"))
+    request.write(f.paragraph(1, css_class="diff-header"))
+    request.write(f.text(_('Differences between revisions %d and %d') % (oldpage.get_real_rev(), newpage.get_real_rev())))
     if edit_count > 1:
-        request.write(' ' + _('(spanning %d versions)') % (edit_count,))
-    request.write('</p>')
+        request.write(f.text(' ' + _('(spanning %d versions)') % (edit_count,)))
+    request.write(f.paragraph(0))
 
     if request.user.show_fancy_diff:
         from MoinMoin.util import diff_html
-        request.write(diff_html.diff(request, oldpage.get_raw_body(), newpage.get_raw_body()))
+        request.write(f.rawHTML(diff_html.diff(request, oldpage.get_raw_body(), newpage.get_raw_body())))
         newpage.send_page(request, count_hit=0, content_only=1, content_id="content-below-diff")
     else:
         from MoinMoin.util import diff_text
         lines = diff_text.diff(oldpage.getlines(), newpage.getlines())
         if not lines:
-            msg = _("No differences found!")
+            msg = f.text(_("No differences found!"))
             if edit_count > 1:
-                msg = msg + '<p>' + _('The page was saved %(count)d times, though!') % {
-                    'count': edit_count}
+                msg = msg + f.paragraph(1) + f.text(_('The page was saved %(count)d times, though!') % {
+                    'count': edit_count}) + f.paragraph(0)
             request.write(msg)
         else:
             if ignorews:
-                request.write(_('(ignoring whitespace)') + '<br>')
+                request.write(f.text(_('(ignoring whitespace)')), f.linebreak())
             else:
                 qstr = {'action': 'diff', 'ignorews': '1', }
                 if rev1:
                     qstr['rev1'] = str(rev1)
                 if rev2:
                     qstr['rev2'] = str(rev2)
-                request.write(Page(request, pagename).link_to(request,
+                request.write(f.paragraph(1), Page(request, pagename).link_to(request,
                     text=_('Ignore changes in the amount of whitespace'),
-                    querystr=qstr, rel='nofollow') + '<p>')
+                    querystr=qstr, rel='nofollow'), f.paragraph(0))
 
-            request.write('<pre>')
+            request.write(f.preformatted(1))
             for line in lines:
                 if line[0] == "@":
-                    request.write('<hr>')
-                request.write(wikiutil.escape(line)+'\n')
-            request.write('</pre>')
+                    request.write(f.rule(1))
+                request.write(f.text(wikiutil.escape(line)+'\n'))
+            request.write(f.preformatted(0))
 
-    request.write('</div>\n') # end content div
+    request.write(f.div(0)) # end content div
     request.theme.send_footer(pagename)
     request.theme.send_closing_html()
 
--- a/MoinMoin/action/info.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/action/info.py	Tue Aug 22 20:13:48 2006 +0200
@@ -23,19 +23,23 @@
 
     def general(page, pagename, request):
         _ = request.getText
-
-        request.write('<h2>%s</h2>\n' % _('General Information'))
+        f = request.formatter
 
-        # show page size
-        request.write(("<p>%s</p>" % _("Page size: %d")) % page.size())
+        request.write(f.heading(1, 1),
+                      f.text(_('General Information')),
+                      f.heading(0, 1))
 
-        # show SHA digest fingerprint
+        request.write(f.paragraph(1),
+                      f.text(_("Page size: %d") % page.size()),
+                      f.paragraph(0))
+
         import sha
         digest = sha.new(page.get_raw_body().encode(config.charset)).hexdigest().upper()
-        request.write('<p>%(label)s <tt>%(value)s</tt></p>' % {
-            'label': _("SHA digest of this page's content is:"),
-            'value': digest,
-            })
+        request.write(f.paragraph(1),
+                      f.rawHTML('%(label)s <tt>%(value)s</tt>' % {
+                          'label': _("SHA digest of this page's content is:"),
+                          'value': digest, }),
+                      f.paragraph(0))
 
         # show attachments (if allowed)
         attachment_info = action.getHandler(request, 'AttachFile', 'info')
@@ -45,25 +49,28 @@
         # show subscribers
         subscribers = page.getSubscribers(request, include_self=1, return_users=1)
         if subscribers:
-            request.write('<p>', _('The following users subscribed to this page:'))
+            request.write(f.paragraph(1))
+            request.write(f.text(_('The following users subscribed to this page:')))
             for lang in subscribers.keys():
-                request.write('<br>[%s] ' % lang)
+                request.write(f.linebreak(), f.text('[%s] ' % lang))
                 for user in subscribers[lang]:
                     # do NOT disclose email addr, only WikiName
                     userhomepage = Page(request, user.name)
                     if userhomepage.exists():
-                        request.write(userhomepage.link_to(request) + ' ')
+                        request.write(f.rawHTML(userhomepage.link_to(request) + ' '))
                     else:
-                        request.write(user.name + ' ')
-            request.write('</p>')
+                        request.write(f.text(user.name + ' '))
+            request.write(f.paragraph(0))
 
         # show links
         links = page.getPageLinks(request)
         if links:
-            request.write('<p>', _('This page links to the following pages:'), '<br>')
+            request.write(f.paragraph(1))
+            request.write(f.text(_('This page links to the following pages:')))
+            request.write(f.linebreak())
             for linkedpage in links:
-                request.write("%s%s " % (Page(request, linkedpage).link_to(request), ",."[linkedpage == links[-1]]))
-            request.write("</p>")
+                request.write(f.rawHTML("%s%s " % (Page(request, linkedpage).link_to(request), ",."[linkedpage == links[-1]])))
+            request.write(f.paragraph(0))
 
     def history(page, pagename, request):
         # show history as default
@@ -206,6 +213,7 @@
     # this will be automatically fixed.
     lang = page.language or request.cfg.language_default
     request.setContentLanguage(lang)
+    f = request.formatter
 
     request.theme.send_title(_('Info for "%s"') % (title,), pagename=pagename)
     menu_items = [
@@ -216,11 +224,11 @@
         (_('Show "%(title)s"') % {'title': _('Page hits and edits')},
          {'action': 'info', 'hitcounts': '1'}),
     ]
-    request.write('<div id="content">\n') # start content div
-    request.write("<p>")
+    request.write(f.div(1, id="content")) # start content div
+    request.write(f.paragraph(1))
     for text, querystr in menu_items:
         request.write("[%s] " % page.link_to(request, text=text, querystr=querystr, rel='nofollow'))
-    request.write("</p>")
+    request.write(f.paragraph(0))
 
     show_hitcounts = int(request.form.get('hitcounts', [0])[0]) != 0
     show_general = int(request.form.get('general', [0])[0]) != 0
@@ -233,7 +241,7 @@
     else:
         history(page, pagename, request)
 
-    request.write('</div>\n') # end content div
+    request.write(f.div(0)) # end content div
     request.theme.send_footer(pagename)
     request.theme.send_closing_html()
 
--- a/MoinMoin/config/multiconfig.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/config/multiconfig.py	Tue Aug 22 20:13:48 2006 +0200
@@ -400,7 +400,7 @@
     # a regex of HTTP_USER_AGENTS that should be excluded from logging
     # and receive a FORBIDDEN for anything except viewing a page
     ua_spiders = ('archiver|cfetch|crawler|curl|gigabot|googlebot|holmes|htdig|httrack|httpunit|jeeves|larbin|leech|'
-                  'linkbot|linkmap|linkwalk|mercator|mirror|msnbot|nutbot|omniexplorer|puf|robot|scooter|'
+                  'linkbot|linkmap|linkwalk|mercator|mirror|msnbot|neomo|nutbot|omniexplorer|puf|robot|scooter|seekbot|'
                   'sherlock|slurp|sitecheck|spider|teleport|voyager|webreaper|wget')
 
     # Wiki identity
--- a/MoinMoin/macro/MonthCalendar.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/macro/MonthCalendar.py	Tue Aug 22 20:13:48 2006 +0200
@@ -334,16 +334,16 @@
         while st < l:
             ch = parmpagename[0][st:st+chstep]
             r, g, b = cliprgb(r, g, b)
-            pagelinks = pagelinks + '<a style="%s" href="%s">%s</a>' % \
-                ('background-color:#%02x%02x%02x;color:#000000;text-decoration:none' % \
-                    (r, g, b), Page(request, parmpagename[0]).url(request), ch)
+            link = Page(request, parmpagename[0]).link_to(request, ch,
+                        style='background-color:#%02x%02x%02x;color:#000000;text-decoration:none' % (r, g, b))
+            pagelinks = pagelinks + link
             r, g, b = (r, g+colorstep, b)
             st = st + chstep
         r, g, b = (255-colorstep, 255, 255-colorstep)
         for page in parmpagename[1:]:
-            pagelinks = pagelinks + '*<a style="%s" href="%s">%s</a>' % \
-                            ('background-color:#%02x%02x%02x;color:#000000;text-decoration:none' % \
-                                (r, g, b), Page(request, page).url(request), page)
+            link = Page(request, page).link_to(request, page,
+                        style='background-color:#%02x%02x%02x;color:#000000;text-decoration:none' % (r, g, b))
+            pagelinks = pagelinks + '*' + link
         showpagename = '   %s<BR>\n' % pagelinks
     else:
         showpagename = ''
--- a/MoinMoin/macro/OrphanedPages.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/macro/OrphanedPages.py	Tue Aug 22 20:13:48 2006 +0200
@@ -24,23 +24,26 @@
             if link in orphaned:
                 del orphaned[link]
 
-    # check for the extreme case
-    if not orphaned:
-        return "<p>%s</p>" % _("No orphaned pages in this wiki.")
-
-    # return a list of page links
-    orphanednames = orphaned.keys()
-    orphanednames.sort()
     result = []
-    result.append(macro.formatter.number_list(1))
-    for name in orphanednames:
-        if not name: continue
-        result.append(macro.formatter.listitem(1))
-        result.append(macro.formatter.pagelink(1, name, generated=1))
-        result.append(macro.formatter.text(name))
-        result.append(macro.formatter.pagelink(0, name))
-        result.append(macro.formatter.listitem(0))
-    result.append(macro.formatter.number_list(0))
+    f = macro.formatter
+    if not orphaned:
+        result.append(f.paragraph(1))
+        result.append(f.text(_("No orphaned pages in this wiki.")))
+        result.append(f.paragraph(0))
+    else:
+        # return a list of page links
+        orphanednames = orphaned.keys()
+        orphanednames.sort()
+        result.append(f.number_list(1))
+        for name in orphanednames:
+            if not name:
+                continue
+            result.append(f.listitem(1))
+            result.append(f.pagelink(1, name, generated=1))
+            result.append(f.text(name))
+            result.append(f.pagelink(0, name))
+            result.append(f.listitem(0))
+        result.append(f.number_list(0))
 
     return ''.join(result)
 
--- a/MoinMoin/macro/SystemAdmin.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/macro/SystemAdmin.py	Tue Aug 22 20:13:48 2006 +0200
@@ -23,7 +23,6 @@
     if not request.user.isSuperUser():
         return ''
 
-    result = []
     _MENU = {
         'attachments': (("File attachment browser"), do_admin_browser),
         'users': (("User account browser"), do_user_browser),
@@ -31,21 +30,24 @@
     choice = request.form.get('sysadm', [None])[0]
 
     # create menu
-    menuitems = [(label, id) for id, (label, handler) in _MENU.items()]
+    menuitems = [(label, fnid) for fnid, (label, handler) in _MENU.items()]
     menuitems.sort()
-    for label, id in menuitems:
-        if id == choice:
-            result.append(macro.formatter.strong(1))
-            result.append(macro.formatter.text(label))
-            result.append(macro.formatter.strong(0))
+    result = []
+    f = macro.formatter
+    for label, fnid in menuitems:
+        if fnid == choice:
+            result.append(f.strong(1))
+            result.append(f.text(label))
+            result.append(f.strong(0))
         else:
-            result.append(wikiutil.link_tag(request, "%s?sysadm=%s" % (macro.formatter.page.page_name, id), label))
-        result.append('<br>')
-    result.append('<br>')
+            #result.append(wikiutil.link_tag(request, "%s?sysadm=%s" % (macro.formatter.page.page_name, id), label))
+            result.append(f.page.link_to(request, label, querystr={'sysadm': fnid}))
+        result.append(f.linebreak())
+    result.append(f.linebreak())
 
     # add chosen content
     if _MENU.has_key(choice):
-        result.append(_MENU[choice][1](request))
+        result.append(f.rawHTML(_MENU[choice][1](request)))
 
-    return macro.formatter.rawHTML(''.join(result))
+    return ''.join(result)
 
--- a/MoinMoin/macro/WantedPages.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/macro/WantedPages.py	Tue Aug 22 20:13:48 2006 +0200
@@ -25,7 +25,9 @@
     # TODO: we should make this a widget and use on all page listing pages
     label = (_('Include system pages'), _('Exclude system pages'))[allpages]
     page = macro.formatter.page
-    controlbar = '<div class="controlbar">%s</div>' % page.link_to(request, label, querystr={'allpages': '%d' % allpages and '0' or '1'})
+    controlbar = macro.formatter.div(1, css_class="controlbar") + \
+                 page.link_to(request, label, querystr={'allpages': '%d' % allpages and '0' or '1'}) + \
+                 macro.formatter.div(0)
 
     # Get page dict readable by current user
     pages = request.rootpage.getPageDict()
--- a/MoinMoin/mail/mailimport.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/mail/mailimport.py	Tue Aug 22 20:13:48 2006 +0200
@@ -235,7 +235,7 @@
                 break
 
     # build an attachment link table for the page with the e-mail
-    attachment_links = [""] + [u'''[attachment:"%s/%s" %s]''' % (pagename, att, att) for att in attachments]
+    attachment_links = [""] + [u'''[attachment:%s %s]''' % (wikiutil.quoteName("%s/%s" % (pagename, att)), att) for att in attachments]
 
     # assemble old page content and new mail body together
     old_content = Page(request, pagename).get_raw_body()
@@ -280,7 +280,7 @@
         from_col = email_to_markup(request, msg['from_addr'])
         to_col = ' '.join([email_to_markup(request, (realname, mailaddr))
                            for realname, mailaddr in msg['target_addrs'] if mailaddr != wiki_address])
-        subj_col = '["%s" %s]' % (pagename, msg['subject'])
+        subj_col = '[%s %s]' % (wikiutil.quoteName(pagename), msg['subject'])
         date_col = msg['date']
         attach_col = " ".join(attachment_links)
         new_line = u'|| %s || %s || %s || [[DateTime(%s)]] || %s ||' % (from_col, to_col, subj_col, date_col, attach_col)
--- a/MoinMoin/parser/text_moin_wiki.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/parser/text_moin_wiki.py	Tue Aug 22 20:13:48 2006 +0200
@@ -62,6 +62,9 @@
     ol_rule = ur"^\s+(?:[0-9]+|[aAiI])\.(?:#\d+)?\s"
     dl_rule = ur"^\s+.*?::\s"
 
+    # this is used inside <pre> / parser sections (we just want to know when it's over):
+    pre_formatting_rules = ur"""(?P<pre>(\}\}\}))"""
+
     # the big, fat, ugly one ;)
     formatting_rules = ur"""(?P<ent_numeric>&#(\d{1,5}|x[0-9a-fA-F]+);)
 (?:(?P<emph_ibb>'''''(?=[^']+'''))
@@ -134,7 +137,14 @@
         self.in_list = 0 # between <ul/ol/dl> and </ul/ol/dl>
         self.in_li = 0 # between <li> and </li>
         self.in_dd = 0 # between <dd> and </dd>
-        self.in_pre = 0
+
+        # states of the parser concerning being inside/outside of some "pre" section:
+        # None == we are not in any kind of pre section (was: 0)
+        # 'search_parser' == we didn't get a parser yet, still searching for it (was: 1)
+        # 'found_parser' == we found a valid parser (was: 2)
+        # 'no_parser' == we have no (valid) parser, use a normal <pre>...</pre> (was: 3)
+        self.in_pre = None
+
         self.in_table = 0
         self.is_big = False
         self.is_small = False
@@ -772,7 +782,7 @@
             # empty bang paths lead to a normal code display
             # can be used to escape real, non-empty bang paths
             word = ''
-            self.in_pre = 3
+            self.in_pre = 'no_parser'
             return self._closeP() + self.formatter.preformatted(1)
         elif s_word.startswith('#!'):
             # First try to find a parser for this (will go away in 2.0)
@@ -781,27 +791,27 @@
 
         if self.parser:
             self.parser_name = parser_name
-            self.in_pre = 2
+            self.in_pre = 'found_parser'
             self.parser_lines = [word]
             return ''
         elif s_word:
-            self.in_pre = 3
+            self.in_pre = 'no_parser'
             return self._closeP() + self.formatter.preformatted(1) + \
                    self.formatter.text(s_word + ' (-)')
         else:
-            self.in_pre = 1
+            self.in_pre = 'search_parser'
             return ''
 
     def _pre_repl(self, word):
         """Handle code displays."""
         word = word.strip()
         if word == '{{{' and not self.in_pre:
-            self.in_pre = 3
-            return self._closeP() + self.formatter.preformatted(self.in_pre)
+            self.in_pre = 'no_parser'
+            return self._closeP() + self.formatter.preformatted(1)
         elif word == '}}}' and self.in_pre:
-            self.in_pre = 0
+            self.in_pre = None
             self.inhibit_p = 0
-            return self.formatter.preformatted(self.in_pre)
+            return self.formatter.preformatted(0)
         return self.formatter.text(word)
 
 
@@ -883,19 +893,16 @@
         for type, hit in match.groupdict().items():
             if hit is not None and not type in ["hmarker", ]:
 
-                ###result.append(u'<span class="info">[replace: %s: "%s"]</span>' % (type, hit))
-                if self.in_pre and type not in ['pre', 'ent']:
-                    return self.formatter.text(hit)
-                else:
-                    # Open p for certain types
-                    if not (self.inhibit_p or self.formatter.in_p
-                            or self.in_pre or (type in self.no_new_p_before)):
-                        result.append(self.formatter.paragraph(1, css_class="line891"))
+                ##result.append(u'<span class="info">[replace: %s: "%s"]</span>' % (type, hit))
+                # Open p for certain types
+                if not (self.inhibit_p or self.formatter.in_p
+                        or self.in_pre or (type in self.no_new_p_before)):
+                    result.append(self.formatter.paragraph(1, css_class="line891"))
 
-                    # Get replace method and replace hit
-                    replace = getattr(self, '_' + type + '_repl')
-                    result.append(replace(hit))
-                    return ''.join(result)
+                # Get replace method and replace hit
+                replace = getattr(self, '_' + type + '_repl')
+                result.append(replace(hit))
+                return ''.join(result)
         else:
             # We should never get here
             import pprint
@@ -926,8 +933,10 @@
                 'word_rule': self.word_rule,
                 'rules': rules,
             }
+        pre_rules = self.pre_formatting_rules.replace('\n', '|')
         self.request.clock.start('compile_huge_and_ugly')
         scan_re = re.compile(rules, re.UNICODE)
+        pre_scan_re = re.compile(pre_rules, re.UNICODE)
         number_re = re.compile(self.ol_rule, re.UNICODE)
         term_re = re.compile(self.dl_rule, re.UNICODE)
         indent_re = re.compile(ur"^\s*", re.UNICODE)
@@ -972,8 +981,7 @@
             if self.in_pre:
                 # TODO: move this into function
                 # still looking for processing instructions
-                # TODO: use strings for pre state, not numbers
-                if self.in_pre == 1:
+                if self.in_pre == 'search_parser':
                     self.parser = None
                     parser_name = ''
                     if line.strip().startswith("#!"):
@@ -981,15 +989,15 @@
                         self.setParser(parser_name)
 
                     if self.parser:
-                        self.in_pre = 2
+                        self.in_pre = 'found_parser'
                         self.parser_lines = [line]
                         self.parser_name = parser_name
                         continue
                     else:
                         self.request.write(self._closeP() +
                                            self.formatter.preformatted(1))
-                        self.in_pre = 3
-                if self.in_pre == 2:
+                        self.in_pre = 'no_parser'
+                if self.in_pre == 'found_parser':
                     # processing mode
                     endpos = line.find("}}}")
                     if endpos == -1:
@@ -1004,7 +1012,7 @@
                     res = self.formatter.parser(self.parser_name, self.parser_lines)
                     self.request.write(res)
                     del self.parser_lines
-                    self.in_pre = 0
+                    self.in_pre = None
                     self.parser = None
 
                     # send rest of line through regex machinery
@@ -1086,10 +1094,10 @@
                     self.in_table = 0
 
             # Scan line, format and write
-            formatted_line = self.scan(scan_re, line)
+            scanning_re = self.in_pre and pre_scan_re or scan_re
+            formatted_line = self.scan(scanning_re, line)
             self.request.write(formatted_line)
-
-            if self.in_pre == 3:
+            if self.in_pre == 'no_parser':
                 self.request.write(self.formatter.linebreak())
 
         # Close code displays, paragraphs, tables and open lists
--- a/MoinMoin/request/__init__.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/request/__init__.py	Tue Aug 22 20:13:48 2006 +0200
@@ -779,6 +779,8 @@
                 if isinstance(d, unicode):
                     # if we are REALLY sure, we can use "strict"
                     d = d.encode(config.charset, 'replace')
+                elif d is None:
+                    continue
                 wd.append(d)
             except UnicodeError:
                 self.log("Unicode error on: %s" % repr(d))
--- a/MoinMoin/server/standalone.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/server/standalone.py	Tue Aug 22 20:13:48 2006 +0200
@@ -575,7 +575,7 @@
             MoinRequestHandler.serve_moin, config.memoryProfile)
 
     if config.logPath:
-        sys.stderr = file(config.logPath, 'at')
+        sys.stderr = file(config.logPath, 'at', 0)
     registerSignalHandlers(quit)
     httpd = makeServer(config)
 
--- a/MoinMoin/theme/__init__.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/theme/__init__.py	Tue Aug 22 20:13:48 2006 +0200
@@ -326,7 +326,9 @@
         for scheme in self.linkSchemas:
             if pagename.startswith(scheme):
                 title = wikiutil.escape(title)
-                link = '<a href="%s">%s</a>' % (pagename, title)
+                link = self.request.formatter.url(1, pagename) + \
+                       self.request.formatter.text(title) +\
+                       self.request.formatter.url(0)
                 return pagename, link
 
         # remove wiki: url prefix
@@ -845,8 +847,10 @@
         @rtype: unicode
         @return: rss href
         """
-        return (u'%s/RecentChanges?action=rss_rc&amp;ddiffs=1&amp;unique=1'
-                % self.request.getScriptname())
+        request = self.request
+        url = Page(request, 'RecentChanges').url(request, querystr={
+                'action':'rss_rc', 'ddiffs': '1', 'unique': '1', }, escape=0, relative=False)
+        return url
 
     def rsslink(self):
         """ Create rss link in head, used by FireFox
@@ -1345,9 +1349,9 @@
         if self.shouldUseRSS():
             link = [
                 u'<div class="rcrss">',
-                u'<a href="%s">' % self.rsshref(),
-                self.make_icon("rss"),
-                u'</a>',
+                self.request.formatter.url(1, self.rsshref()),
+                self.request.formatter.rawHTML(self.make_icon("rss")),
+                self.request.formatter.url(0),
                 u'</div>',
                 ]
             html += ''.join(link)
--- a/MoinMoin/util/diff3.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/util/diff3.py	Tue Aug 22 20:13:48 2006 +0200
@@ -98,7 +98,7 @@
         result.extend(new[new_nr:])
     # other added lines
     elif old_nr == old_len and new_nr == new_len:
-        result.extend(other[other_nr])
+        result.extend(other[other_nr:])
     # new deleted lines
     elif (new_nr == new_len and (old_len - old_nr == other_len - other_nr) and
           match(old, other, old_nr, other_nr, old_len-old_nr) == old_len - old_nr):
--- a/MoinMoin/wikidicts.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/wikidicts.py	Tue Aug 22 20:13:48 2006 +0200
@@ -296,7 +296,7 @@
         self.groupdict[groupname] = grp
 
     def hasgroup(self, groupname):
-        return self.dictdict.has_key(groupname)
+        return self.groupdict.has_key(groupname)
 
     def membergroups(self, member):
         """list all groups where member is a member of"""
--- a/MoinMoin/wikisync.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/wikisync.py	Tue Aug 22 20:13:48 2006 +0200
@@ -20,6 +20,7 @@
 from MoinMoin.Page import Page
 from MoinMoin.PageEditor import PageEditor
 from MoinMoin.packages import unpackLine, packLine
+from MoinMoin.support.multicall import MultiCall
 
 
 MIMETYPE_MOIN = "text/wiki"
@@ -43,6 +44,9 @@
 class UnsupportedWikiException(Exception): pass
 
 
+class NotAllowedException(Exception): pass
+
+
 class SyncPage(object):
     """ This class represents a page in one or two wiki(s). """
     def __init__(self, name, local_rev=None, remote_rev=None, local_name=None, remote_name=None,
@@ -158,7 +162,7 @@
 
 class MoinRemoteWiki(RemoteWiki):
     """ Used for MoinMoin wikis reachable via XMLRPC. """
-    def __init__(self, request, interwikiname, prefix, pagelist, verbose=False):
+    def __init__(self, request, interwikiname, prefix, pagelist, user, password, verbose=False):
         self.request = request
         self.prefix = prefix
         self.pagelist = pagelist
@@ -178,7 +182,16 @@
         try:
             iw_list = self.connection.interwikiName()
         except xmlrpclib.Fault, e:
-            raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, the version 1.6 is required at least."))
+            raise UnsupportedWikiException(_("The remote version of MoinMoin is too old, version 1.6 is required at least."))
+
+        if user and password:
+            token = self.connection.getAuthToken(user, password)
+            if token:
+                self.token = token
+            else:
+                raise NotAllowedException(_("Invalid username or password."))
+        else:
+            self.token = None
 
         self.remote_interwikiname = remote_interwikiname = iw_list[0]
         self.remote_iwid = remote_iwid = iw_list[1]
@@ -201,7 +214,13 @@
         """ Returns the binary diff of the remote page named pagename, given
             from_rev and to_rev. """
         try:
-            result = self.connection.getDiff(pagename, from_rev, to_rev, n_name)
+            if self.token:
+                m = MultiCall(self.connection)
+                m.applyAuthToken(self.token)
+                m.getDiff(pagename, from_rev, to_rev, n_name)
+                tokres, result = m()
+            else:
+                result = self.connection.getDiff(pagename, from_rev, to_rev, n_name)
         except xmlrpclib.Fault, e:
             if e.faultCode == "INVALID_TAG":
                 return None
@@ -211,12 +230,29 @@
 
     def merge_diff(self, pagename, diff, local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name):
         """ Merges the diff into the page on the remote side. """
-        result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
+        try:
+            if self.token:
+                m = MultiCall(self.connection)
+                m.applyAuthToken(self.token)
+                m.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
+                tokres, result = m()
+            else:
+                result = self.connection.mergeDiff(pagename, xmlrpclib.Binary(diff), local_rev, delta_remote_rev, last_remote_rev, interwiki_name, n_name)
+        except xmlrpclib.Fault, e:
+            if e.faultCode == "NOT_ALLOWED":
+                raise NotAllowedException
+            raise
         return result
 
     def delete_page(self, pagename, last_remote_rev, interwiki_name):
         try:
-            result = self.connection.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None)
+            if self.token:
+                m = MultiCall(self.connection)
+                m.applyAuthToken(self.token)
+                m.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None)
+                tokres, result = m()
+            else:
+                result = self.connection.mergeDiff(pagename, None, None, None, last_remote_rev, interwiki_name, None)
         except xmlrpclib.Fault, e:
             if e.faultCode == "NOT_ALLOWED":
                 return e.faultString
@@ -239,7 +275,13 @@
                    "prefix": self.prefix,
                    "pagelist": self.pagelist,
                    "mark_deleted": True}
-        pages = self.connection.getAllPagesEx(options)
+        if self.token:
+            m = MultiCall(self.connection)
+            m.applyAuthToken(self.token)
+            m.getAllPagesEx(options)
+            tokres, pages = m()
+        else:
+            pages = self.connection.getAllPagesEx(options)
         rpages = []
         for name, revno in pages:
             normalised_name = normalise_pagename(name, self.prefix)
--- a/MoinMoin/xmlrpc/__init__.py	Mon Aug 21 19:36:23 2006 +0200
+++ b/MoinMoin/xmlrpc/__init__.py	Tue Aug 22 20:13:48 2006 +0200
@@ -723,7 +723,7 @@
 
         # User may read page?
         if not self.request.user.may.read(pagename) or not self.request.user.may.write(pagename):
-            return self.notAllowedFault()
+            return xmlrpclib.Fault("NOT_ALLOWED", "You are not allowed to write to this page.")
 
         # XXX add locking here!
 
--- a/docs/CHANGES	Mon Aug 21 19:36:23 2006 +0200
+++ b/docs/CHANGES	Tue Aug 22 20:13:48 2006 +0200
@@ -266,6 +266,8 @@
   * tuning:
     * more efficient locking code on POSIX platforms, we do much less I/O there now
     * removed most chmod calls in favour of a single os.umask call
+  * fixed a bad crash that happens (on ANY page!) when you put a *Dict page's
+    name as a member into a *Group page
   
 Version 1.5.4:
     HINT: read docs/README.migration.
--- a/docs/CHANGES.aschremmer	Mon Aug 21 19:36:23 2006 +0200
+++ b/docs/CHANGES.aschremmer	Tue Aug 22 20:13:48 2006 +0200
@@ -7,76 +7,83 @@
     * How should we store tags? (Metadata support would be handy)
       (currently done in Pickle files)
 
-  ToDo:
-    * Implement rollback
+  ToDo: (this should not go into CHANGES)
     * Reduce round-trip times by caching queries and using MultiCall objects (coroutines?)
-    * Check what needs to be documented on MoinMaster.
-    * Test with prefixes
-    * Search for XXX
-    * Implement a cross-site authentication system, i.e. mainly an
-      identity storage. (does OpenID make sense?)
+    * Implement a password agent. (does OpenID make sense?)
 
+  Longterm ToDo:
     * Maybe refactor YYY into MoinLocalWiki
-    * Add page locking, i.e. use the one in the new storage layer.
+    * Add page locking, i.e. use the one in the new storage layer (see XXX).
     * Do older tags of one wiki site have to be stored as well? Why don't we
       keep just one tag?
-    * Put author names into the comment field, transmit mimetypes.
+    * Put author names into the comment field
+    * Transmit mimetypes (see XXX). Needs new storage system.
     * Implement renamed pages.
+    * Cache the result of remote.get_pages locally to reduce the load.
 
   New Features:
-    * XMLRPC method to return the Moin version
-    * XMLRPC multicall support
     * Conflict icon in RecentChanges
-    * XMLRPC Authentication System
-    * Binary Diffing
-    * XMLRPC method to get binary diffs
-    * XMLRPC method to merge remote changes locally
-    * XMLRPC method to get the interwiki name
-    * TagStore/PickleTagStore class
-    * XMLRPC method to get the pagelist in a special way (revnos,
-      no system pages etc.)
-    * IWID support - i.e. every instance has a unique ID
-    * InterWiki page editable in the wiki, modification detection based on mtimes
-    * SyncPages action
-    * XMLRPC functions may return Fault instances
-    * diff3 algorithm extended, a new mode should reduce the conflicts
-    * GetText2 macro
-    * showtags action
+    * New XMLRPC methods (see doc strings for details):
+      * getMoinVersion
+      * system.multicall -- multicall support
+      * Authentication System: getAuthToken/applyAuthToken
+      * getDiff -- method to get binary diffs
+      * mergeDiff -- method to merge local changes remotely
+      * interwikiName -- method to get the IWID and the interwiki moniker
+      * getAllPagesEx -- method to get the pagelist in a special way (revnos,
+        no system pages etc.)
+    * IWID support - i.e. every wiki instance has a unique ID
+    * The list of InterWiki sites is editable in the wiki (page InterWikiMap),
+      it is getting reloaded every minute
+    * Synchronisation of wikis using the SyncPages action
+    * GetText2 macro that allows translating messages that contain data
+    * showtags action that lists all tags related to a page
 
-  Bugfixes (only stuff that is buggy in moin/1.6 main branch):
-    * Conflict resolution fixes. (merged into main)
-    * Python 2.5 compatibility fixes in the Page caching logic (merged)
-    * sre pickle issues in the wikidicts code (merged)
+  Bugfixes:
+    * Conflict resolution fixes.
+    * Python 2.5 compatibility fixes in the Page caching logic
+    * sre pickle issues in the wikidicts code
     * cgitb can hide particular names, this avoids information leaks
       if the user files cannot be parsed for example
     * Fixed User.__repr__ - it is insane to put the ID in there
-    * Worked around the FastCGI problem on Lighttpd: empty lines in the error log, thanks to Jay Soffian
+    * Worked around the FastCGI problem on Lighttpd: empty lines in the error
+      log, thanks to Jay Soffian
     * Fixed the MetaDict code to use locks.
     * Fixed bug in request.py that avoided showing a traceback if there was a fault
       after the first headers were sent.
     * Fixed severe race conditions in the meta dict and the sync tags code.
     * Mute the tempnam warning in the caching module.
+    * diff3 algorithm extended, a new mode should reduce the conflicts,
+      fixed a bug that silently truncated pages if there was content added
+      on one side at the end
+    * Standalone opens it logfile unbuffered from now on, thanks to
+      Carsten Grohmann
 
   Other Changes:
     * Refactored conflict resolution and XMLRPC code.
-    * Enhanced API at some points.
+    * Added a module for binary diffs
 
   Developer notes:
-    * ...
+    * There is a new Page method called Page.get_raw_body_str that returns
+      the encoded page body. This is useful if you just deal with byte data
+      (e.g. while generating binary diffs).
+    * The TagStore/PickleTagStore system is used to store the synchronisation tags.
+    * XMLRPC functions may return Fault instances
+    * Moin got multicall support, including a module that makes it usable on the
+      client-side without requiring Python 2.4
 
 Do not forget to check the related wiki page: http://moinmoin.wikiwikiweb.de/WikiSyncronisation
 
 Diary
 =====
 
-Week 21: Basic Infrastructur setup (repos),
+Week 21: Basic infrastructure setup (repos),
          initial talks to the mentor, started writing the design document,
          helped other students to get started
-Week 22: Tax forms, Fulfilled transcription request,
+Week 22: Tax forms, fulfilled transcription request,
          written conflict icon support, refactored conflict handling,
-         changed conflict icon,
-         Added xmlrpc multicall support into the server and
-         backported the client code from python 2.4
+         changed conflict icon, added xmlrpc multicall support into the server
+         and backported the client code from Python 2.4
 Week 23: Debian-Sprint in Extremadura, Spain. Initial thoughts about Mercurial as
          a base for syncronisation. (See wiki)
 Week 24: Evaluation of OpenID as a base for authentication, written local testing scripts
@@ -110,10 +117,13 @@
          the prefix and the pageList on the remote side. Finished the direction==DOWN mode.
 Week 33: Started designing the solutions for the other sync cases. Store and transmit the
          normalised name. Implemented preliminary mime type support, only transmission of the mime type
-         and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and choke :).
-         Added infrastructure support for detecting deleted pages (not used in the merging logic yet).
+         and the new storage API is missing. Changed getDiff interface. Detect renamed pages (and inform
+         the user about the missing support for them).
+         Added infrastructure support for detecting deleted pages.
          Reworked i18n and logging support. Added GetText2 macro. Getting syncing of pages
          working that are just available on one side. Working synchronisation of deleted pages.
+         Implemented rollback in case of remote problems and exception logging.
+         Documented this system on MoinMaster. Added authentication support.
 
 2006-07-18: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress
 2006-07-19: the requested daily entry is missing here, see http://moinmoin.wikiwikiweb.de/GoogleSoc2006/BetterProgress