changeset 5813:7d636ee769da

merge editlogfixes branch
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Sun, 25 Dec 2011 16:43:04 +0100
parents 23a9acba224c 1d78194ecfeb
children a7b3ccda38d8
files
diffstat 98 files changed, 9819 insertions(+), 4067 deletions(-)
--- a/MoinMoin/action/PackagePages.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/action/PackagePages.py	Sun Dec 25 16:43:04 2011 +0100
@@ -201,7 +201,7 @@
             script.append(packLine(["AddRevision", str(cnt), page.page_name, userid, "Created by the PackagePages action."]))
 
             timestamp = wikiutil.version2timestamp(page.mtime_usecs())
-            
+
             # avoid getting strange exceptions from zipfile in case of pre-1980 timestamps
             nineteeneighty = (10 * 365 + 3) * 24 * 3600 # 1970 + 10y + 3d
             timestamp = max(nineteeneighty, timestamp) # zip can not store timestamps before 1980
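The hunk above only strips trailing whitespace; the surrounding context is the guard against pre-1980 modification times. A minimal standalone sketch of that guard, same arithmetic as above, illustrative only: ZIP files store DOS timestamps, which cannot represent dates before 1980, so older values are clamped up.

    nineteeneighty = (10 * 365 + 3) * 24 * 3600  # seconds from 1970 to early 1980

    def clamp_zip_timestamp(timestamp):
        # zipfile misbehaves on pre-1980 date_time values,
        # so never hand it anything older than that
        return max(nineteeneighty, timestamp)

    print(clamp_zip_timestamp(0))            # clamped to the 1980 floor
    print(clamp_zip_timestamp(1324828984))   # recent values pass through unchanged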
--- a/MoinMoin/action/serveopenid.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/action/serveopenid.py	Sun Dec 25 16:43:04 2011 +0100
@@ -16,7 +16,7 @@
 from openid import sreg
 from openid.cryptutil import randomString
 from openid.server import server
-from openid.message import IDENTIFIER_SELECT
+from openid.message import IDENTIFIER_SELECT, OPENID_NS
 from MoinMoin.widget import html
 from MoinMoin.web.request import MoinMoinFinish
 
@@ -295,7 +295,24 @@
         #       sreg_resp = sreg.SRegResponse.extractResponse(openidreq, sreg_data)
         #       sreg_resp.addToOpenIDResponse(reply.fields)
 
-        reply = openidreq.answer(True, identity=identity, server_url=server_url)
+        request = self.request
+
+        # obtain the endpoint if not overridden by an identity endpoint
+        page_url = request.getQualifiedURL(
+                         request.page.url(request, querystr={'action': 'serveopenid'}))
+        endpoint_changed = server_url != page_url
+
+        # prepare the response
+        reply = openidreq.answer(True, identity=identity, server_url=server_url or page_url)
+
+        # if the endpoint has changed, perhaps reflecting an identity-specific
+        # endpoint, remove any association handle in use, working around any
+        # association-related issues in relying parties (such as python-openid)
+        if openidreq.assoc_handle and endpoint_changed:
+            store = MoinOpenIDStore(request)
+            signatory = server.Signatory(store)
+            reply.fields.setArg(OPENID_NS, "invalidate_handle", openidreq.assoc_handle)
+            signatory.invalidate(openidreq.assoc_handle, dumb=False)
         return reply
 
     def user_trusts_url(self, trustroot):
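The new hunk answers the OpenID request against the (possibly identity-specific) endpoint and, when that endpoint differs from the page's own serveopenid URL, tells the relying party to drop its association handle so verification falls back to stateless (dumb) mode. A rough standalone sketch of that pattern with python-openid, reusing the names from the hunk above and assuming a store object is available; it is not the exact MoinMoin code:

    from openid.message import OPENID_NS
    from openid.server import server

    def answer_with_handle_invalidation(openidreq, identity, server_url, page_url, store):
        endpoint_changed = server_url != page_url
        reply = openidreq.answer(True, identity=identity,
                                 server_url=server_url or page_url)
        if openidreq.assoc_handle and endpoint_changed:
            # the association was made against a different endpoint URL:
            # ask the relying party to discard its handle and verify statelessly
            signatory = server.Signatory(store)
            reply.fields.setArg(OPENID_NS, 'invalidate_handle', openidreq.assoc_handle)
            signatory.invalidate(openidreq.assoc_handle, dumb=False)
        return reply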
--- a/MoinMoin/config/multiconfig.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/config/multiconfig.py	Sun Dec 25 16:43:04 2011 +0100
@@ -832,7 +832,7 @@
     # and receive a FORBIDDEN for anything except viewing a page
     # list must not contain 'java' because of twikidraw wanting to save drawing uses this useragent
     ('ua_spiders',
-     ('archiver|cfetch|charlotte|crawler|curl|gigabot|googlebot|heritrix|holmes|htdig|httrack|httpunit|'
+     ('archiver|cfetch|charlotte|crawler|gigabot|googlebot|heritrix|holmes|htdig|httrack|httpunit|'
       'intelix|jeeves|larbin|leech|libwww-perl|linkbot|linkmap|linkwalk|litefinder|mercator|'
       'microsoft.url.control|mirror| mj12bot|msnbot|msrbot|neomo|nutbot|omniexplorer|puf|robot|scooter|seekbot|'
       'sherlock|slurp|sitecheck|snoopy|spider|teleport|twiceler|voilabot|voyager|webreaper|wget|yeti'),
--- a/MoinMoin/macro/TableOfContents.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/macro/TableOfContents.py	Sun Dec 25 16:43:04 2011 +0100
@@ -157,7 +157,8 @@
 
     # this is so we get a correctly updated TOC if we just preview in the editor -
     # the new content is not stored on disk yet, but available as macro.parser.raw:
-    p.set_raw_body(macro.parser.raw, modified=1)
+    format = '#format %s\n' % p.pi['format']
+    p.set_raw_body(format + macro.parser.raw, modified=1)
 
     output = macro.request.redirectedOutput(p.send_page,
                                             content_only=True,
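The change above re-attaches the page's #format processing instruction to the unsaved editor text, so a TOC preview is parsed with the same markup parser as the stored page. A trivial sketch of the idea (hypothetical helper, not the MoinMoin API):

    def raw_body_for_preview(page_format, unsaved_text):
        # prepend the processing instruction the editor preview would otherwise lose
        return '#format %s\n' % page_format + unsaved_text

    print(raw_body_for_preview('wiki', '= Heading =\nsome text'))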
--- a/MoinMoin/script/account/homepage.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/script/account/homepage.py	Sun Dec 25 16:43:04 2011 +0100
@@ -90,6 +90,7 @@
             homepage_default_text = homepage_default_text.replace('@EMAIL@', "<<MailTo(%(obfuscated_mail)s)>>")
         else:
             homepage_default_text = '''#acl %(username)s:read,write,delete,revert Default
+#format wiki
 
 == %(username)s ==
 
--- a/MoinMoin/support/flup/client/fcgi_app.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/client/fcgi_app.py	Sun Dec 25 16:43:04 2011 +0100
@@ -31,6 +31,7 @@
 import struct
 import socket
 import errno
+import types
 
 __all__ = ['FCGIApp']
 
@@ -386,11 +387,14 @@
         if self._connect is not None:
             # The simple case. Create a socket and connect to the
             # application.
-            if type(self._connect) is str:
+            if isinstance(self._connect, types.StringTypes):
                 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+                sock.connect(self._connect)
+            elif hasattr(socket, 'create_connection'):
+                sock = socket.create_connection(self._connect)
             else:
                 sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            sock.connect(self._connect)
+                sock.connect(self._connect)
             return sock
 
         # To be done when I have more time...
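The connection setup above now distinguishes three cases: a string address means a Unix domain socket, a (host, port) tuple uses socket.create_connection() where available (Python 2.6+, which handles address resolution), and older Pythons fall back to a plain AF_INET socket. A standalone sketch of the same dispatch, using the Python 2 idioms flup uses:

    import socket
    import types

    def open_connection(connect):
        if isinstance(connect, types.StringTypes):
            # a string is taken as a Unix domain socket path
            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            sock.connect(connect)
        elif hasattr(socket, 'create_connection'):
            # Python 2.6+: resolves (host, port) and tries each address family
            sock = socket.create_connection(connect)
        else:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect(connect)
        return sock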
--- a/MoinMoin/support/flup/client/scgi_app.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/client/scgi_app.py	Sun Dec 25 16:43:04 2011 +0100
@@ -31,6 +31,7 @@
 import struct
 import socket
 import errno
+import types
 
 __all__ = ['SCGIApp']
 
@@ -136,11 +137,14 @@
         return [result]
 
     def _getConnection(self):
-        if type(self._connect) is str:
+        if isinstance(self._connect, types.StringTypes):
             sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+            sock.connect(self._connect)
+        elif hasattr(socket, 'create_connection'):
+            sock = socket.create_connection(self._connect)
         else:
             sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        sock.connect(self._connect)
+            sock.connect(self._connect)
         return sock
     
     _environPrefixes = ['SERVER_', 'HTTP_', 'REQUEST_', 'REMOTE_', 'PATH_',
--- a/MoinMoin/support/flup/server/ajp_base.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/server/ajp_base.py	Sun Dec 25 16:43:04 2011 +0100
@@ -960,7 +960,8 @@
         all errors should be caught at the application level.
         """
         if self.debug:
-            request.startResponse(200, 'OK', [('Content-Type', 'text/html')])
+            request.startResponse(500, 'Internal Server Error',
+                                  [('Content-Type', 'text/html')])
             import cgitb
             request.write(cgitb.html(sys.exc_info()))
         else:
@@ -972,5 +973,6 @@
 <p>An unhandled exception was thrown by the application.</p>
 </body></html>
 """
-            request.startResponse(200, 'OK', [('Content-Type', 'text/html')])
+            request.startResponse(500, 'Internal Server Error',
+                                  [('Content-Type', 'text/html')])
             request.write(errorpage)
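Both branches of the AJP error handler now report HTTP 500 instead of 200, so clients and proxies no longer treat a crashed request as a successful, cacheable page. A minimal illustration of the same idea as a plain WSGI error handler; flup's internal Request API differs, this is only the general shape:

    import sys, cgitb

    def handle_error(start_response, debug=False):
        # meant to be called from inside an "except:" block, so that
        # sys.exc_info() still refers to the active exception
        start_response('500 Internal Server Error',
                       [('Content-Type', 'text/html')])
        if debug:
            return [cgitb.html(sys.exc_info())]
        return ['<html><body><p>An unhandled exception was thrown '
                'by the application.</p></body></html>']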
--- a/MoinMoin/support/flup/server/fcgi_base.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/server/fcgi_base.py	Sun Dec 25 16:43:04 2011 +0100
@@ -821,7 +821,7 @@
         outrec = Record(FCGI_UNKNOWN_TYPE)
         outrec.contentData = struct.pack(FCGI_UnknownTypeBody, inrec.type)
         outrec.contentLength = FCGI_UnknownTypeBody_LEN
-        self.writeRecord(rec)
+        self.writeRecord(outrec)
         
 class MultiplexedConnection(Connection):
     """
@@ -1186,7 +1186,10 @@
 
         if not environ.has_key('PATH_INFO') or not environ['PATH_INFO']:
             if reqUri is not None:
-                environ['PATH_INFO'] = reqUri[0]
+                scriptName = environ['SCRIPT_NAME']
+                if not reqUri[0].startswith(scriptName):
+                    environ['wsgi.errors'].write('WARNING: SCRIPT_NAME does not match REQUEST_URI')
+                environ['PATH_INFO'] = reqUri[0][len(scriptName):]
             else:
                 environ['PATH_INFO'] = ''
         if not environ.has_key('QUERY_STRING') or not environ['QUERY_STRING']:
@@ -1214,7 +1217,8 @@
         """
         if self.debug:
             import cgitb
-            req.stdout.write('Content-Type: text/html\r\n\r\n' +
+            req.stdout.write('Status: 500 Internal Server Error\r\n' +
+                             'Content-Type: text/html\r\n\r\n' +
                              cgitb.html(sys.exc_info()))
         else:
             errorpage = """<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
@@ -1225,5 +1229,6 @@
 <p>An unhandled exception was thrown by the application.</p>
 </body></html>
 """
-            req.stdout.write('Content-Type: text/html\r\n\r\n' +
+            req.stdout.write('Status: 500 Internal Server Error\r\n' +
+                             'Content-Type: text/html\r\n\r\n' +
                              errorpage)
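Besides the 500 status, the FastCGI side now derives PATH_INFO by stripping SCRIPT_NAME from the request URI instead of using the URI verbatim, and warns when the two do not line up. A simplified standalone sketch of that derivation (hypothetical helper, no flup internals):

    import sys

    def derive_path_info(environ, request_uri):
        script_name = environ.get('SCRIPT_NAME', '')
        if not request_uri.startswith(script_name):
            environ['wsgi.errors'].write(
                'WARNING: SCRIPT_NAME does not match REQUEST_URI')
        # PATH_INFO is whatever follows the mount point
        environ['PATH_INFO'] = request_uri[len(script_name):]
        return environ['PATH_INFO']

    print(derive_path_info({'SCRIPT_NAME': '/wiki', 'wsgi.errors': sys.stderr},
                           '/wiki/FrontPage'))   # -> /FrontPage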
--- a/MoinMoin/support/flup/server/fcgi_single.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/server/fcgi_single.py	Sun Dec 25 16:43:04 2011 +0100
@@ -93,7 +93,7 @@
             if kw.has_key(key):
                 del kw[key]
         SingleServer.__init__(self, jobClass=self._connectionClass,
-                              jobArgs=(self,), **kw)
+                              jobArgs=(self, None), **kw)
         self.capability = {
             FCGI_MAX_CONNS: 1,
             FCGI_MAX_REQS: 1,
--- a/MoinMoin/support/flup/server/paste_factory.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/server/paste_factory.py	Sun Dec 25 16:43:04 2011 +0100
@@ -30,18 +30,26 @@
 
 def run_ajp_thread(wsgi_app, global_conf,
                    scriptName='', host='localhost', port='8009',
-                   allowedServers='127.0.0.1', debug=NoDefault):
+                   allowedServers='127.0.0.1', debug=NoDefault,
+                   minSpare=None, maxSpare=None, maxThreads=None):
     import flup.server.ajp
     addr = (host, int(port))
     if debug is NoDefault:
         debug = global_conf.get('debug', False)
     debug = asbool(debug)
+    threadpool_args = {}
+    if minSpare is not None:
+        threadpool_args['minSpare'] = int(minSpare)
+    if maxSpare is not None:
+        threadpool_args['maxSpare'] = int(maxSpare)
+    if maxThreads is not None:
+        threadpool_args['maxThreads'] = int(maxThreads)
     s = flup.server.ajp.WSGIServer(
         wsgi_app,
         scriptName=scriptName,
         bindAddress=addr,
         allowedServers=aslist(allowedServers),
-        debug=debug,
+        debug=debug, **threadpool_args
         )
     s.run()
 
@@ -76,7 +84,8 @@
 def run_fcgi_thread(wsgi_app, global_conf,
                     host=None, port=None,
                     socket=None, umask=None,
-                    multiplexed=False, debug=NoDefault):
+                    multiplexed=False, debug=NoDefault,
+                    minSpare=None, maxSpare=None, maxThreads=None):
     import flup.server.fcgi
     if socket:
         assert host is None and port is None
@@ -91,11 +100,19 @@
     if debug is NoDefault:
         debug = global_conf.get('debug', False)
     debug = asbool(debug)
+    threadpool_args = {}
+    if minSpare is not None:
+        threadpool_args['minSpare'] = int(minSpare)
+    if maxSpare is not None:
+        threadpool_args['maxSpare'] = int(maxSpare)
+    if maxThreads is not None:
+        threadpool_args['maxThreads'] = int(maxThreads)
     s = flup.server.fcgi.WSGIServer(
         wsgi_app,
         bindAddress=sock, umask=umask,
         multiplexed=asbool(multiplexed),
-        debug=debug)
+        debug=debug, **threadpool_args
+        )
     s.run()
 
 def run_fcgi_fork(wsgi_app, global_conf,
@@ -139,18 +156,26 @@
 def run_scgi_thread(wsgi_app, global_conf,
                     scriptName=NoDefault, host='localhost', port='4000',
                     allowedServers='127.0.0.1',
-                    debug=NoDefault):
+                    debug=NoDefault,
+                    minSpare=None, maxSpare=None, maxThreads=None):
     import flup.server.scgi
     addr = (host, int(port))
     if debug is NoDefault:
         debug = global_conf.get('debug', False)
     debug = asbool(debug)
+    threadpool_args = {}
+    if minSpare is not None:
+        threadpool_args['minSpare'] = int(minSpare)
+    if maxSpare is not None:
+        threadpool_args['maxSpare'] = int(maxSpare)
+    if maxThreads is not None:
+        threadpool_args['maxThreads'] = int(maxThreads)
     s = flup.server.scgi.WSGIServer(
         wsgi_app,
         scriptName=scriptName,
         bindAddress=addr,
         allowedServers=aslist(allowedServers),
-        debug=debug,
+        debug=debug, **threadpool_args
         )
     s.run()
 
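Each paste factory above now collects minSpare/maxSpare/maxThreads into a threadpool_args dict, so thread-pool settings are only passed to WSGIServer when the deployment configuration actually sets them and flup's defaults apply otherwise. The repeated pattern, written as a hypothetical helper (flup spells it out inline in each factory function):

    def threadpool_kwargs(minSpare=None, maxSpare=None, maxThreads=None):
        args = {}
        if minSpare is not None:
            args['minSpare'] = int(minSpare)
        if maxSpare is not None:
            args['maxSpare'] = int(maxSpare)
        if maxThreads is not None:
            args['maxThreads'] = int(maxThreads)
        return args

    # only explicitly configured values reach WSGIServer(**kwargs)
    print(threadpool_kwargs(maxThreads='25'))   # {'maxThreads': 25}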
--- a/MoinMoin/support/flup/server/scgi_base.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/server/scgi_base.py	Sun Dec 25 16:43:04 2011 +0100
@@ -525,7 +525,10 @@
                 environ['SCRIPT_NAME'] = ''
             if not environ.has_key('PATH_INFO') or not environ['PATH_INFO']:
                 if reqUri is not None:
-                    environ['PATH_INFO'] = reqUri[0]
+                    scriptName = environ['SCRIPT_NAME']
+                    if not reqUri[0].startswith(scriptName):
+                        self.logger.warning('SCRIPT_NAME does not match request URI')
+                    environ['PATH_INFO'] = reqUri[0][len(scriptName):]
                 else:
                     environ['PATH_INFO'] = ''
         else:
@@ -550,7 +553,8 @@
         """
         if self.debug:
             import cgitb
-            request.stdout.write('Content-Type: text/html\r\n\r\n' +
+            request.stdout.write('Status: 500 Internal Server Error\r\n' +
+                                 'Content-Type: text/html\r\n\r\n' +
                                  cgitb.html(sys.exc_info()))
         else:
             errorpage = """<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
@@ -561,5 +565,6 @@
 <p>An unhandled exception was thrown by the application.</p>
 </body></html>
 """
-            request.stdout.write('Content-Type: text/html\r\n\r\n' +
+            request.stdout.write('Status: 500 Internal Server Error\r\n' +
+                                 'Content-Type: text/html\r\n\r\n' +
                                  errorpage)
--- a/MoinMoin/support/flup/server/threadedserver.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/flup/server/threadedserver.py	Sun Dec 25 16:43:04 2011 +0100
@@ -105,7 +105,8 @@
             self._mainloopPeriodic()
 
         # Restore signal handlers.
-        self._restoreSignalHandlers()
+        if not sys.platform.startswith('win'):
+            self._restoreSignalHandlers()
 
         # Return bool based on whether or not SIGHUP was received.
         return self._hupReceived
--- a/MoinMoin/support/pygments/__init__.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/__init__.py	Sun Dec 25 16:43:04 2011 +0100
@@ -20,13 +20,13 @@
     The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
 
     .. _Pygments tip:
-       http://dev.pocoo.org/hg/pygments-main/archive/tip.tar.gz#egg=Pygments-dev
+       http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
 
     :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-__version__ = '1.3.1'
+__version__ = '1.4'
 __docformat__ = 'restructuredtext'
 
 __all__ = ['lex', 'format', 'highlight']
--- a/MoinMoin/support/pygments/formatters/html.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/formatters/html.py	Sun Dec 25 16:43:04 2011 +0100
@@ -21,14 +21,17 @@
 __all__ = ['HtmlFormatter']
 
 
-def escape_html(text):
+_escape_html_table = {
+    ord('&'): u'&amp;',
+    ord('<'): u'&lt;',
+    ord('>'): u'&gt;',
+    ord('"'): u'&quot;',
+    ord("'"): u'&#39;',
+}
+
+def escape_html(text, table=_escape_html_table):
     """Escape &, <, > as well as single and double quotes for HTML."""
-    return text.replace('&', '&amp;').  \
-                replace('<', '&lt;').   \
-                replace('>', '&gt;').   \
-                replace('"', '&quot;'). \
-                replace("'", '&#39;')
-
+    return text.translate(table)
 
 def get_random_id():
     """Return a random id for javascript fields."""
@@ -371,22 +374,21 @@
             except ValueError:
                 pass
 
-        self._class_cache = {}
         self._create_stylesheet()
 
     def _get_css_class(self, ttype):
         """Return the css class of this token type prefixed with
         the classprefix option."""
-        if ttype in self._class_cache:
-            return self._class_cache[ttype]
-        return self.classprefix + _get_ttype_class(ttype)
+        ttypeclass = _get_ttype_class(ttype)
+        if ttypeclass:
+            return self.classprefix + ttypeclass
+        return ''
 
     def _create_stylesheet(self):
         t2c = self.ttype2class = {Token: ''}
         c2s = self.class2style = {}
-        cp = self.classprefix
         for ttype, ndef in self.style:
-            name = cp + _get_ttype_class(ttype)
+            name = self._get_css_class(ttype)
             style = ''
             if ndef['color']:
                 style += 'color: #%s; ' % ndef['color']
@@ -508,6 +510,7 @@
         st = self.linenostep
         la = self.lineanchors
         aln = self.anchorlinenos
+        nocls = self.noclasses
         if sp:
             lines = []
 
@@ -542,9 +545,16 @@
         # in case you wonder about the seemingly redundant <div> here: since the
         # content in the other cell also is wrapped in a div, some browsers in
         # some configurations seem to mess up the formatting...
-        yield 0, ('<table class="%stable">' % self.cssclass +
-                  '<tr><td class="linenos"><div class="linenodiv"><pre>' +
-                  ls + '</pre></div></td><td class="code">')
+        if nocls:
+            yield 0, ('<table class="%stable">' % self.cssclass +
+                      '<tr><td><div class="linenodiv" '
+                      'style="background-color: #f0f0f0; padding-right: 10px">'
+                      '<pre style="line-height: 125%">' +
+                      ls + '</pre></div></td><td class="code">')
+        else:
+            yield 0, ('<table class="%stable">' % self.cssclass +
+                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
+                      ls + '</pre></div></td><td class="code">')
         yield 0, dummyoutfile.getvalue()
         yield 0, '</td></tr></table>'
 
@@ -556,7 +566,23 @@
         num = self.linenostart
         mw = len(str(len(lines) + num - 1))
 
-        if sp:
+        if self.noclasses:
+            if sp:
+                for t, line in lines:
+                    if num%sp == 0:
+                        style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
+                    else:
+                        style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
+                    yield 1, '<span style="%s">%*s</span> ' % (
+                        style, mw, (num%st and ' ' or num)) + line
+                    num += 1
+            else:
+                for t, line in lines:
+                    yield 1, ('<span style="background-color: #f0f0f0; '
+                              'padding: 0 5px 0 5px">%*s</span> ' % (
+                              mw, (num%st and ' ' or num)) + line)
+                    num += 1
+        elif sp:
             for t, line in lines:
                 yield 1, '<span class="lineno%s">%*s</span> ' % (
                     num%sp == 0 and ' special' or '', mw,
@@ -616,6 +642,7 @@
         # for <span style=""> lookup only
         getcls = self.ttype2class.get
         c2s = self.class2style
+        escape_table = _escape_html_table
 
         lspan = ''
         line = ''
@@ -630,7 +657,7 @@
                 cls = self._get_css_class(ttype)
                 cspan = cls and '<span class="%s">' % cls or ''
 
-            parts = escape_html(value).split('\n')
+            parts = value.translate(escape_table).split('\n')
 
             # for all but the last line
             for part in parts[:-1]:
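escape_html() above now escapes via a single translate() pass over a table keyed by character ordinals, instead of five chained replace() calls, and the highlighting loop reuses the same table directly. A standalone sketch of the technique, which works on unicode strings as Pygments uses internally:

    _escape_table = {
        ord('&'): u'&amp;',
        ord('<'): u'&lt;',
        ord('>'): u'&gt;',
        ord('"'): u'&quot;',
        ord("'"): u'&#39;',
    }

    def escape_html(text):
        # one pass over the string instead of five chained replace() calls
        return text.translate(_escape_table)

    print(escape_html(u'<a href="x">&\'</a>'))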
--- a/MoinMoin/support/pygments/formatters/img.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/formatters/img.py	Sun Dec 25 16:43:04 2011 +0100
@@ -18,7 +18,7 @@
 
 # Import this carefully
 try:
-    import Image, ImageDraw, ImageFont
+    from PIL import Image, ImageDraw, ImageFont
     pil_available = True
 except ImportError:
     pil_available = False
--- a/MoinMoin/support/pygments/formatters/latex.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/formatters/latex.py	Sun Dec 25 16:43:04 2011 +0100
@@ -21,13 +21,15 @@
     return text.replace('\\', '\x00'). \
                 replace('{', '\x01'). \
                 replace('}', '\x02'). \
-                replace('^', '\x03'). \
-                replace('_', '\x04'). \
                 replace('\x00', r'\%sZbs{}' % commandprefix). \
                 replace('\x01', r'\%sZob{}' % commandprefix). \
                 replace('\x02', r'\%sZcb{}' % commandprefix). \
-                replace('\x03', r'\%sZca{}' % commandprefix). \
-                replace('\x04', r'\%sZus{}' % commandprefix)
+                replace('^', r'\%sZca{}' % commandprefix). \
+                replace('_', r'\%sZus{}' % commandprefix). \
+                replace('#', r'\%sZsh{}' % commandprefix). \
+                replace('%', r'\%sZpc{}' % commandprefix). \
+                replace('$', r'\%sZdl{}' % commandprefix). \
+                replace('~', r'\%sZti{}' % commandprefix)
 
 
 DOC_TEMPLATE = r'''
@@ -81,6 +83,9 @@
 # * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
 #   for its class.
 # * \PY resets the style, parses the classnames and then calls \PY@do.
+#
+# Tip: to read this code, print it out in substituted form using e.g.
+# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
 
 STYLE_TEMPLATE = r'''
 \makeatletter
@@ -101,6 +106,10 @@
 \def\%(cp)sZob{\char`\{}
 \def\%(cp)sZcb{\char`\}}
 \def\%(cp)sZca{\char`\^}
+\def\%(cp)sZsh{\char`\#}
+\def\%(cp)sZpc{\char`\%%}
+\def\%(cp)sZdl{\char`\$}
+\def\%(cp)sZti{\char`\~}
 %% for compatibility with earlier versions
 \def\%(cp)sZat{@}
 \def\%(cp)sZlb{[}
@@ -131,12 +140,12 @@
 
     .. sourcecode:: latex
 
-        \begin{Verbatim}[commandchars=@\[\]]
-        @PY[k][def ]@PY[n+nf][foo](@PY[n][bar]):
-            @PY[k][pass]
+        \begin{Verbatim}[commandchars=\\{\}]
+        \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
+            \PY{k}{pass}
         \end{Verbatim}
 
-    The special command used here (``@PY``) and all the other macros it needs
+    The special command used here (``\PY``) and all the other macros it needs
     are output by the `get_style_defs` method.
 
     With the `full` option, a complete LaTeX document is output, including
--- a/MoinMoin/support/pygments/lexer.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexer.py	Sun Dec 25 16:43:04 2011 +0100
@@ -349,7 +349,53 @@
     self.tokens on the first instantiation.
     """
 
+    def _process_regex(cls, regex, rflags):
+        """Preprocess the regular expression component of a token definition."""
+        return re.compile(regex, rflags).match
+
+    def _process_token(cls, token):
+        """Preprocess the token component of a token definition."""
+        assert type(token) is _TokenType or callable(token), \
+               'token type must be simple type or callable, not %r' % (token,)
+        return token
+
+    def _process_new_state(cls, new_state, unprocessed, processed):
+        """Preprocess the state transition action of a token definition."""
+        if isinstance(new_state, str):
+            # an existing state
+            if new_state == '#pop':
+                return -1
+            elif new_state in unprocessed:
+                return (new_state,)
+            elif new_state == '#push':
+                return new_state
+            elif new_state[:5] == '#pop:':
+                return -int(new_state[5:])
+            else:
+                assert False, 'unknown new state %r' % new_state
+        elif isinstance(new_state, combined):
+            # combine a new state from existing ones
+            tmp_state = '_tmp_%d' % cls._tmpname
+            cls._tmpname += 1
+            itokens = []
+            for istate in new_state:
+                assert istate != new_state, 'circular state ref %r' % istate
+                itokens.extend(cls._process_state(unprocessed,
+                                                  processed, istate))
+            processed[tmp_state] = itokens
+            return (tmp_state,)
+        elif isinstance(new_state, tuple):
+            # push more than one state
+            for istate in new_state:
+                assert (istate in unprocessed or
+                        istate in ('#pop', '#push')), \
+                       'unknown new state ' + istate
+            return new_state
+        else:
+            assert False, 'unknown new state def %r' % new_state
+
     def _process_state(cls, unprocessed, processed, state):
+        """Preprocess a single state definition."""
         assert type(state) is str, "wrong state name %r" % state
         assert state[0] != '#', "invalid state name %r" % state
         if state in processed:
@@ -360,60 +406,31 @@
             if isinstance(tdef, include):
                 # it's a state reference
                 assert tdef != state, "circular state reference %r" % state
-                tokens.extend(cls._process_state(unprocessed, processed, str(tdef)))
+                tokens.extend(cls._process_state(unprocessed, processed,
+                                                 str(tdef)))
                 continue
 
             assert type(tdef) is tuple, "wrong rule def %r" % tdef
 
             try:
-                rex = re.compile(tdef[0], rflags).match
+                rex = cls._process_regex(tdef[0], rflags)
             except Exception, err:
                 raise ValueError("uncompilable regex %r in state %r of %r: %s" %
                                  (tdef[0], state, cls, err))
 
-            assert type(tdef[1]) is _TokenType or callable(tdef[1]), \
-                   'token type must be simple type or callable, not %r' % (tdef[1],)
+            token = cls._process_token(tdef[1])
 
             if len(tdef) == 2:
                 new_state = None
             else:
-                tdef2 = tdef[2]
-                if isinstance(tdef2, str):
-                    # an existing state
-                    if tdef2 == '#pop':
-                        new_state = -1
-                    elif tdef2 in unprocessed:
-                        new_state = (tdef2,)
-                    elif tdef2 == '#push':
-                        new_state = tdef2
-                    elif tdef2[:5] == '#pop:':
-                        new_state = -int(tdef2[5:])
-                    else:
-                        assert False, 'unknown new state %r' % tdef2
-                elif isinstance(tdef2, combined):
-                    # combine a new state from existing ones
-                    new_state = '_tmp_%d' % cls._tmpname
-                    cls._tmpname += 1
-                    itokens = []
-                    for istate in tdef2:
-                        assert istate != state, 'circular state ref %r' % istate
-                        itokens.extend(cls._process_state(unprocessed,
-                                                          processed, istate))
-                    processed[new_state] = itokens
-                    new_state = (new_state,)
-                elif isinstance(tdef2, tuple):
-                    # push more than one state
-                    for state in tdef2:
-                        assert (state in unprocessed or
-                                state in ('#pop', '#push')), \
-                               'unknown new state ' + state
-                    new_state = tdef2
-                else:
-                    assert False, 'unknown new state def %r' % tdef2
-            tokens.append((rex, tdef[1], new_state))
+                new_state = cls._process_new_state(tdef[2],
+                                                   unprocessed, processed)
+
+            tokens.append((rex, token, new_state))
         return tokens
 
     def process_tokendef(cls, name, tokendefs=None):
+        """Preprocess a dictionary of token definitions."""
         processed = cls._all_tokens[name] = {}
         tokendefs = tokendefs or cls.tokens[name]
         for state in tokendefs.keys():
@@ -421,6 +438,7 @@
         return processed
 
     def __call__(cls, *args, **kwds):
+        """Instantiate cls after preprocessing its token definitions."""
         if not hasattr(cls, '_tokens'):
             cls._all_tokens = {}
             cls._tmpname = 0
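The refactoring above splits RegexLexerMeta's state preprocessing into _process_regex(), _process_token() and _process_new_state() without changing the token-definition syntax lexers use. A small toy lexer, illustrative only, exercising the new-state forms that _process_new_state() still accepts; instantiating it is what triggers the preprocessing:

    from pygments.lexer import RegexLexer, combined
    from pygments.token import Text, String, Comment, Punctuation

    class ToyLexer(RegexLexer):
        name = 'Toy'
        tokens = {
            'root': [
                (r'"', String, 'string'),                     # push an existing state
                (r'\(', Punctuation, ('args', 'name')),       # push several states at once
                (r'/\*', Comment, combined('comment', 'nested')),  # ad-hoc combined state
                (r'\s+', Text),                               # no state change
            ],
            'string': [
                (r'"', String, '#pop'),                       # pop back off the stack
                (r'[^"]+', String),
            ],
            'name':    [(r'\w+', Text, '#pop')],
            'args':    [(r'\)', Punctuation, '#pop'), (r'[^)]+', Text)],
            'comment': [(r'\*/', Comment, '#pop'), (r'[^*]+', Comment)],
            'nested':  [(r'.', Comment, '#pop')],
        }

    # instantiation runs process_tokendef(), i.e. the code refactored above
    lexer = ToyLexer()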
--- a/MoinMoin/support/pygments/lexers/_luabuiltins.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/_luabuiltins.py	Sun Dec 25 16:43:04 2011 +0100
@@ -60,14 +60,7 @@
            'debug.setmetatable',
            'debug.setupvalue',
            'debug.traceback'],
- 'io': ['file:close',
-        'file:flush',
-        'file:lines',
-        'file:read',
-        'file:seek',
-        'file:setvbuf',
-        'file:write',
-        'io.close',
+ 'io': ['io.close',
         'io.flush',
         'io.input',
         'io.lines',
@@ -175,7 +168,7 @@
             return name.startswith('math')
 
         def is_in_io_module(name):
-            return name.startswith('io.') or name.startswith('file:')
+            return name.startswith('io.')
 
         def is_in_os_module(name):
             return name.startswith('os.')
--- a/MoinMoin/support/pygments/lexers/_mapping.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/_mapping.py	Sun Dec 25 16:43:04 2011 +0100
@@ -30,16 +30,18 @@
     'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
     'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
     'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
+    'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk',), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
     'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
     'BaseMakefileLexer': ('pygments.lexers.text', 'Makefile', ('basemake',), (), ()),
     'BashLexer': ('pygments.lexers.other', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass'), ('application/x-sh', 'application/x-shellscript')),
     'BashSessionLexer': ('pygments.lexers.other', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
     'BatchLexer': ('pygments.lexers.other', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
     'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+    'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
     'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
     'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
     'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h'), ('text/x-chdr', 'text/x-csrc')),
-    'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake',), ('text/x-cmake',)),
+    'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
     'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
     'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
     'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
@@ -49,7 +51,7 @@
     'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
     'ClojureLexer': ('pygments.lexers.agile', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
     'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript'), ('*.coffee',), ('text/coffeescript',)),
-    'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldufsion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
+    'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
     'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
     'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
     'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx'), ('text/x-c++hdr', 'text/x-c++src')),
@@ -68,13 +70,15 @@
     'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
     'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
     'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
-    'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan',), ('text/x-dylan',)),
+    'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
+    'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl'), ('text/x-dylan',)),
     'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
     'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl'), ('text/x-erlang',)),
     'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
     'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
     'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
     'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
+    'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
     'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
     'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90'), ('text/x-fortran',)),
     'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
@@ -85,6 +89,7 @@
     'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('Cucumber', 'cucumber', 'Gherkin', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
     'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
     'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
+    'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
     'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
     'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
     'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
@@ -94,14 +99,17 @@
     'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
     'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
     'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
-    'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg', '*.properties'), ('text/x-ini',)),
+    'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+    'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg'), ('text/x-ini',)),
     'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
+    'IokeLexer': ('pygments.lexers.agile', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
     'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
+    'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade', 'JADE'), ('*.jade',), ('text/x-jade',)),
     'JavaLexer': ('pygments.lexers.compiled', 'Java', ('java',), ('*.java',), ('text/x-java',)),
     'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
     'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
     'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
-    'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/x-javascript', 'text/x-javascript', 'text/javascript')),
+    'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
     'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
     'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
     'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
@@ -109,7 +117,7 @@
     'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
     'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
     'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
-    'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua',), ('text/x-lua', 'application/x-lua')),
+    'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
     'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode',), ('*.moo',), ('text/x-moocode',)),
     'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
     'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
@@ -117,6 +125,8 @@
     'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
     'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
     'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
+    'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+    'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
     'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab', 'octave'), ('*.m',), ('text/matlab',)),
     'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()),
     'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
@@ -143,8 +153,11 @@
     'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
     'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
     'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]'), ('text/x-php',)),
+    'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript',), ('*.ps', '*.eps'), ('application/postscript',)),
     'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
     'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+    'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties',), ('*.properties',), ('text/x-java-properties',)),
+    'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf',), ('*.proto',), ()),
     'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
     'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
     'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
@@ -165,17 +178,20 @@
     'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
     'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
     'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
-    'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx'), ('text/x-ruby', 'application/x-ruby')),
+    'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
     'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R'), ('text/S-plus', 'text/S', 'text/R')),
     'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)),
     'ScalaLexer': ('pygments.lexers.compiled', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
+    'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml', 'SCAML'), ('*.scaml',), ('text/x-scaml',)),
     'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm',), ('text/x-scheme', 'application/x-scheme')),
+    'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
     'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak'), ('*.st',), ('text/x-smalltalk',)),
     'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
     'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
     'SqlLexer': ('pygments.lexers.other', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
     'SqliteConsoleLexer': ('pygments.lexers.other', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
     'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
+    'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
     'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
     'TcshLexer': ('pygments.lexers.other', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
     'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
@@ -183,7 +199,12 @@
     'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
     'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
     'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
+    'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
+    'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
+    'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
+    'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('v',), ('*.v', '*.sv'), ('text/x-verilog',)),
     'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc'), ('text/x-vim',)),
+    'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy'), ('*.xqy', '*.xquery'), ('text/xquery', 'application/xquery')),
     'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
     'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
     'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml', 'application/xsl+xml', 'application/xslt+xml')),
--- a/MoinMoin/support/pygments/lexers/agile.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/agile.py	Sun Dec 25 16:43:04 2011 +0100
@@ -22,7 +22,7 @@
 __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
            'RubyLexer', 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer',
            'MiniDLexer', 'IoLexer', 'TclLexer', 'ClojureLexer',
-           'Python3Lexer', 'Python3TracebackLexer']
+           'Python3Lexer', 'Python3TracebackLexer', 'FactorLexer', 'IokeLexer']
 
 # b/w compatibility
 from pygments.lexers.functional import SchemeLexer
@@ -43,8 +43,8 @@
     tokens = {
         'root': [
             (r'\n', Text),
-            (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
-            (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+            (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+            (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
             (r'[^\S\n]+', Text),
             (r'#.*$', Comment),
             (r'[]{}:(),;[]', Punctuation),
@@ -104,7 +104,7 @@
         'numbers': [
             (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
             (r'\d+[eE][+-]?[0-9]+', Number.Float),
-            (r'0\d+', Number.Oct),
+            (r'0[0-7]+', Number.Oct),
             (r'0[xX][a-fA-F0-9]+', Number.Hex),
             (r'\d+L', Number.Integer.Long),
             (r'\d+', Number.Integer)
@@ -310,17 +310,17 @@
         tb = 0
         for match in line_re.finditer(text):
             line = match.group()
-            if line.startswith('>>> ') or line.startswith('... '):
+            if line.startswith(u'>>> ') or line.startswith(u'... '):
                 tb = 0
                 insertions.append((len(curcode),
                                    [(0, Generic.Prompt, line[:4])]))
                 curcode += line[4:]
-            elif line.rstrip() == '...' and not tb:
+            elif line.rstrip() == u'...' and not tb:
                 # only a new >>> prompt can end an exception block
                 # otherwise an ellipsis in place of the traceback frames
                 # will be mishandled
                 insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, '...')]))
+                                   [(0, Generic.Prompt, u'...')]))
                 curcode += line[3:]
             else:
                 if curcode:
@@ -329,8 +329,8 @@
                         yield item
                     curcode = ''
                     insertions = []
-                if (line.startswith('Traceback (most recent call last):') or
-                    re.match(r'  File "[^"]+", line \d+\n$', line)):
+                if (line.startswith(u'Traceback (most recent call last):') or
+                    re.match(ur'  File "[^"]+", line \d+\n$', line)):
                     tb = 1
                     curtb = line
                     tbindex = match.start()
@@ -338,7 +338,7 @@
                     yield match.start(), Name.Class, line
                 elif tb:
                     curtb += line
-                    if not (line.startswith(' ') or line.strip() == '...'):
+                    if not (line.startswith(' ') or line.strip() == u'...'):
                         tb = 0
                         for i, t, v in tblexer.get_tokens_unprocessed(curtb):
                             yield tbindex+i, t, v
@@ -371,7 +371,7 @@
         ],
         'intb': [
             (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name.Identifier, Text)),
+             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
             (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
              bygroups(Text, Name.Builtin, Text, Number, Text)),
             (r'^(    )(.+)(\n)',
@@ -379,9 +379,9 @@
             (r'^([ \t]*)(...)(\n)',
              bygroups(Text, Comment, Text)), # for doctests...
             (r'^(.+)(: )(.+)(\n)',
-             bygroups(Name.Class, Text, Name.Identifier, Text), '#pop'),
+             bygroups(Generic.Error, Text, Name, Text), '#pop'),
             (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
-             bygroups(Name.Class, Text), '#pop')
+             bygroups(Generic.Error, Text), '#pop')
         ],
     }
 
@@ -409,15 +409,15 @@
         ],
         'intb': [
             (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name.Identifier, Text)),
+             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
             (r'^(    )(.+)(\n)',
              bygroups(Text, using(Python3Lexer), Text)),
             (r'^([ \t]*)(...)(\n)',
              bygroups(Text, Comment, Text)), # for doctests...
             (r'^(.+)(: )(.+)(\n)',
-             bygroups(Name.Class, Text, Name.Identifier, Text), '#pop'),
+             bygroups(Generic.Error, Text, Name, Text), '#pop'),
             (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
-             bygroups(Name.Class, Text), '#pop')
+             bygroups(Generic.Error, Text), '#pop')
         ],
     }
 
@@ -428,8 +428,9 @@
     """
 
     name = 'Ruby'
-    aliases = ['rb', 'ruby']
-    filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx']
+    aliases = ['rb', 'ruby', 'duby']
+    filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+                 '*.rbx', '*.duby']
     mimetypes = ['text/x-ruby', 'application/x-ruby']
 
     flags = re.DOTALL | re.MULTILINE
@@ -623,7 +624,7 @@
              r'putc|puts|raise|rand|readline|readlines|require|'
              r'scan|select|self|send|set_trace_func|singleton_methods|sleep|'
              r'split|sprintf|srand|sub|syscall|system|taint|'
-             r'test|throw|to_a|to_s|trace_var|trap|type|untaint|untrace_var|'
+             r'test|throw|to_a|to_s|trace_var|trap|untaint|untrace_var|'
              r'warn)\b', Name.Builtin),
             (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
             # normal heredocs
@@ -837,7 +838,7 @@
             (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
             (r'(case|continue|do|else|elsif|for|foreach|if|last|my|'
              r'next|our|redo|reset|then|unless|until|while|use|'
-             r'print|new|BEGIN|END|return)\b', Keyword),
+             r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword),
             (r'(format)(\s+)([a-zA-Z0-9_]+)(\s*)(=)(\s*\n)',
              bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
             (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
@@ -893,11 +894,14 @@
             (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
             (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
             (r'0b[01]+(_[01]+)*', Number.Bin),
-            (r'\d+', Number.Integer),
+            (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+             Number.Float),
+            (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+            (r'\d+(_\d+)*', Number.Integer),
             (r"'(\\\\|\\'|[^'])*'", String),
             (r'"(\\\\|\\"|[^"])*"', String),
             (r'`(\\\\|\\`|[^`])*`', String.Backtick),
-            (r'<([^\s>]+)>', String.Regexp),
+            (r'<([^\s>]+)>', String.Regex),
             (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
             (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
             (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
@@ -974,7 +978,7 @@
     }
 
     def analyse_text(text):
-        if shebang_matches(text, r'perl(\d\.\d\.\d)?'):
+        if shebang_matches(text, r'perl'):
             return True
         if 'my $' in text:
             return 0.9
@@ -1006,7 +1010,7 @@
 
     name = 'Lua'
     aliases = ['lua']
-    filenames = ['*.lua']
+    filenames = ['*.lua', '*.wlua']
     mimetypes = ['text/x-lua', 'application/x-lua']
 
     tokens = {
@@ -1026,10 +1030,11 @@
 
             (r'\n', Text),
             (r'[^\S\n]', Text),
-            (r'(?s)\[(=*)\[.*?\]\1\]', String.Multiline),
-            (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
+            # multiline strings
+            (r'(?s)\[(=*)\[.*?\]\1\]', String),
 
             (r'(==|~=|<=|>=|\.\.|\.\.\.|[=+\-*/%^<>#])', Operator),
+            (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
             (r'(and|or|not)\b', Operator.Word),
 
             ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
@@ -1042,14 +1047,13 @@
 
             (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
 
-            # multiline strings
-            (r'(?s)\[(=*)\[(.*?)\]\1\]', String),
             ("'", String.Single, combined('stringescape', 'sqs')),
             ('"', String.Double, combined('stringescape', 'dqs'))
         ],
 
         'funcname': [
-            ('[A-Za-z_][A-Za-z0-9_]*', Name.Function, '#pop'),
+            ('(?:([A-Za-z_][A-Za-z0-9_]*)(\.))?([A-Za-z_][A-Za-z0-9_]*)',
+             bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
             # inline function
             ('\(', Punctuation, '#pop'),
         ],
@@ -1483,3 +1487,473 @@
             (r'(\(|\))', Punctuation),
         ],
     }
+
+
+class FactorLexer(RegexLexer):
+    """
+    Lexer for the `Factor <http://factorcode.org>`_ language.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Factor'
+    aliases = ['factor']
+    filenames = ['*.factor']
+    mimetypes = ['text/x-factor']
+
+    flags = re.MULTILINE | re.UNICODE
+
+    builtin_kernel = (
+        r'(?:or|2bi|2tri|while|wrapper|nip|4dip|wrapper\\?|bi\\*|'
+        r'callstack>array|both\\?|hashcode|die|dupd|callstack|'
+        r'callstack\\?|3dup|tri@|pick|curry|build|\\?execute|3bi|'
+        r'prepose|>boolean|\\?if|clone|eq\\?|tri\\*|\\?|=|swapd|'
+        r'2over|2keep|3keep|clear|2dup|when|not|tuple\\?|dup|2bi\\*|'
+        r'2tri\\*|call|tri-curry|object|bi@|do|unless\\*|if\\*|loop|'
+        r'bi-curry\\*|drop|when\\*|assert=|retainstack|assert\\?|-rot|'
+        r'execute|2bi@|2tri@|boa|with|either\\?|3drop|bi|curry\\?|'
+        r'datastack|until|3dip|over|3curry|tri-curry\\*|tri-curry@|swap|'
+        r'and|2nip|throw|bi-curry|\\(clone\\)|hashcode\\*|compose|2dip|if|3tri|'
+        r'unless|compose\\?|tuple|keep|2curry|equal\\?|assert|tri|2drop|'
+        r'most|<wrapper>|boolean\\?|identity-hashcode|identity-tuple\\?|'
+        r'null|new|dip|bi-curry@|rot|xor|identity-tuple|boolean)\s'
+        )
+
+    builtin_assocs = (
+        r'(?:\\?at|assoc\\?|assoc-clone-like|assoc=|delete-at\\*|'
+        r'assoc-partition|extract-keys|new-assoc|value\\?|assoc-size|'
+        r'map>assoc|push-at|assoc-like|key\\?|assoc-intersect|'
+        r'assoc-refine|update|assoc-union|assoc-combine|at\\*|'
+        r'assoc-empty\\?|at\\+|set-at|assoc-all\\?|assoc-subset\\?|'
+        r'assoc-hashcode|change-at|assoc-each|assoc-diff|zip|values|'
+        r'value-at|rename-at|inc-at|enum\\?|at|cache|assoc>map|<enum>|'
+        r'assoc|assoc-map|enum|value-at\\*|assoc-map-as|>alist|'
+        r'assoc-filter-as|clear-assoc|assoc-stack|maybe-set-at|'
+        r'substitute|assoc-filter|2cache|delete-at|assoc-find|keys|'
+        r'assoc-any\\?|unzip)\s'
+        )
+
+    builtin_combinators = (
+        r'(?:case|execute-effect|no-cond|no-case\\?|3cleave>quot|2cleave|'
+        r'cond>quot|wrong-values\\?|no-cond\\?|cleave>quot|no-case|'
+        r'case>quot|3cleave|wrong-values|to-fixed-point|alist>quot|'
+        r'case-find|cond|cleave|call-effect|2cleave>quot|recursive-hashcode|'
+        r'linear-case-quot|spread|spread>quot)\s'
+        )
+
+    builtin_math = (
+        r'(?:number=|if-zero|next-power-of-2|each-integer|\\?1\\+|'
+        r'fp-special\\?|imaginary-part|unless-zero|float>bits|number\\?|'
+        r'fp-infinity\\?|bignum\\?|fp-snan\\?|denominator|fp-bitwise=|\\*|'
+        r'\\+|power-of-2\\?|-|u>=|/|>=|bitand|log2-expects-positive|<|'
+        r'log2|>|integer\\?|number|bits>double|2/|zero\\?|(find-integer)|'
+        r'bits>float|float\\?|shift|ratio\\?|even\\?|ratio|fp-sign|bitnot|'
+        r'>fixnum|complex\\?|/i|/f|byte-array>bignum|when-zero|sgn|>bignum|'
+        r'next-float|u<|u>|mod|recip|rational|find-last-integer|>float|'
+        r'(all-integers\\?)|2^|times|integer|fixnum\\?|neg|fixnum|sq|'
+        r'bignum|(each-integer)|bit\\?|fp-qnan\\?|find-integer|complex|'
+        r'<fp-nan>|real|double>bits|bitor|rem|fp-nan-payload|all-integers\\?|'
+        r'real-part|log2-expects-positive\\?|prev-float|align|unordered\\?|'
+        r'float|fp-nan\\?|abs|bitxor|u<=|odd\\?|<=|/mod|rational\\?|>integer|'
+        r'real\\?|numerator)\s'
+        )
+
+    builtin_sequences = (
+        r'(?:member-eq\\?|append|assert-sequence=|find-last-from|trim-head-slice|'
+        r'clone-like|3sequence|assert-sequence\\?|map-as|last-index-from|'
+        r'reversed|index-from|cut\\*|pad-tail|remove-eq!|concat-as|'
+        r'but-last|snip|trim-tail|nths|nth|2selector|sequence|slice\\?|'
+        r'<slice>|partition|remove-nth|tail-slice|empty\\?|tail\\*|'
+        r'if-empty|find-from|virtual-sequence\\?|member\\?|set-length|'
+        r'drop-prefix|unclip|unclip-last-slice|iota|map-sum|'
+        r'bounds-error\\?|sequence-hashcode-step|selector-for|'
+        r'accumulate-as|map|start|midpoint@|\\(accumulate\\)|rest-slice|'
+        r'prepend|fourth|sift|accumulate!|new-sequence|follow|map!|'
+        r'like|first4|1sequence|reverse|slice|unless-empty|padding|'
+        r'virtual@|repetition\\?|set-last|index|4sequence|max-length|'
+        r'set-second|immutable-sequence|first2|first3|replicate-as|'
+        r'reduce-index|unclip-slice|supremum|suffix!|insert-nth|'
+        r'trim-tail-slice|tail|3append|short|count|suffix|concat|'
+        r'flip|filter|sum|immutable\\?|reverse!|2sequence|map-integers|'
+        r'delete-all|start\\*|indices|snip-slice|check-slice|sequence\\?|'
+        r'head|map-find|filter!|append-as|reduce|sequence=|halves|'
+        r'collapse-slice|interleave|2map|filter-as|binary-reduce|'
+        r'slice-error\\?|product|bounds-check\\?|bounds-check|harvest|'
+        r'immutable|virtual-exemplar|find|produce|remove|pad-head|last|'
+        r'replicate|set-fourth|remove-eq|shorten|reversed\\?|'
+        r'map-find-last|3map-as|2unclip-slice|shorter\\?|3map|find-last|'
+        r'head-slice|pop\\*|2map-as|tail-slice\\*|but-last-slice|'
+        r'2map-reduce|iota\\?|collector-for|accumulate|each|selector|'
+        r'append!|new-resizable|cut-slice|each-index|head-slice\\*|'
+        r'2reverse-each|sequence-hashcode|pop|set-nth|\\?nth|'
+        r'<flat-slice>|second|join|when-empty|collector|'
+        r'immutable-sequence\\?|<reversed>|all\\?|3append-as|'
+        r'virtual-sequence|subseq\\?|remove-nth!|push-either|new-like|'
+        r'length|last-index|push-if|2all\\?|lengthen|assert-sequence|'
+        r'copy|map-reduce|move|third|first|3each|tail\\?|set-first|'
+        r'prefix|bounds-error|any\\?|<repetition>|trim-slice|exchange|'
+        r'surround|2reduce|cut|change-nth|min-length|set-third|produce-as|'
+        r'push-all|head\\?|delete-slice|rest|sum-lengths|2each|head\\*|'
+        r'infimum|remove!|glue|slice-error|subseq|trim|replace-slice|'
+        r'push|repetition|map-index|trim-head|unclip-last|mismatch)\s'
+        )
+
+    builtin_namespaces = (
+        r'(?:global|\\+@|change|set-namestack|change-global|init-namespaces|'
+        r'on|off|set-global|namespace|set|with-scope|bind|with-variable|'
+        r'inc|dec|counter|initialize|namestack|get|get-global|make-assoc)\s'
+        )
+
+    builtin_arrays = (
+        r'(?:<array>|2array|3array|pair|>array|1array|4array|pair\\?|'
+        r'array|resize-array|array\\?)\s'
+        )
+
+    builtin_io = (
+        r'(?:\\+character\\+|bad-seek-type\\?|readln|each-morsel|stream-seek|'
+        r'read|print|with-output-stream|contents|write1|stream-write1|'
+        r'stream-copy|stream-element-type|with-input-stream|'
+        r'stream-print|stream-read|stream-contents|stream-tell|'
+        r'tell-output|bl|seek-output|bad-seek-type|nl|stream-nl|write|'
+        r'flush|stream-lines|\\+byte\\+|stream-flush|read1|'
+        r'seek-absolute\\?|stream-read1|lines|stream-readln|'
+        r'stream-read-until|each-line|seek-end|with-output-stream\\*|'
+        r'seek-absolute|with-streams|seek-input|seek-relative\\?|'
+        r'input-stream|stream-write|read-partial|seek-end\\?|'
+        r'seek-relative|error-stream|read-until|with-input-stream\\*|'
+        r'with-streams\\*|tell-input|each-block|output-stream|'
+        r'stream-read-partial|each-stream-block|each-stream-line)\s'
+        )
+
+    builtin_strings = (
+        r'(?:resize-string|>string|<string>|1string|string|string\\?)\s'
+        )
+
+    builtin_vectors = (
+        r'(?:vector\\?|<vector>|\\?push|vector|>vector|1vector)\s'
+        )
+
+    builtin_continuations = (
+        r'(?:with-return|restarts|return-continuation|with-datastack|'
+        r'recover|rethrow-restarts|<restart>|ifcc|set-catchstack|'
+        r'>continuation<|cleanup|ignore-errors|restart\\?|'
+        r'compute-restarts|attempt-all-error|error-thread|continue|'
+        r'<continuation>|attempt-all-error\\?|condition\\?|'
+        r'<condition>|throw-restarts|error|catchstack|continue-with|'
+        r'thread-error-hook|continuation|rethrow|callcc1|'
+        r'error-continuation|callcc0|attempt-all|condition|'
+        r'continuation\\?|restart|return)\s'
+        )
+
+    tokens = {
+        'root': [
+            # TODO: (( inputs -- outputs ))
+            # TODO: << ... >>
+
+            # defining words
+            (r'(\s*)(:|::|MACRO:|MEMO:)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Function)),
+            (r'(\s*)(M:)(\s+)(\S+)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Class, Text, Name.Function)),
+            (r'(\s*)(GENERIC:)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Function)),
+            (r'(\s*)(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Function, Text, Name.Function)),
+            (r'(\()(\s+)', bygroups(Name.Function, Text), 'stackeffect'),
+            (r'\;\s', Keyword),
+
+            # imports and namespaces
+            (r'(USING:)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), 'import'),
+            (r'(USE:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(UNUSE:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(QUALIFIED:)(\s+)(\S+)',
+                bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(QUALIFIED-WITH:)(\s+)(\S+)',
+                bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+)(=>)',
+                bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Text)),
+            (r'(IN:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(?:ALIAS|DEFER|FORGET|POSTPONE):', Keyword.Namespace),
+
+            # tuples and classes
+            (r'(TUPLE:)(\s+)(\S+)(\s+<\s+)(\S+)',
+                bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
+            (r'(TUPLE:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class), 'slots'),
+            (r'(UNION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+            (r'(INTERSECTION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+            (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
+                bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+            (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+                bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+            (r'INSTANCE:', Keyword),
+            (r'SLOT:', Keyword),
+            (r'MIXIN:', Keyword),
+            (r'(?:SINGLETON|SINGLETONS):', Keyword),
+
+            # other syntax
+            (r'CONSTANT:', Keyword),
+            (r'(?:SYMBOL|SYMBOLS):', Keyword),
+            (r'ERROR:', Keyword),
+            (r'SYNTAX:', Keyword),
+            (r'(HELP:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
+            (r'(MAIN:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Function)),
+            (r'(?:ALIEN|TYPEDEF|FUNCTION|STRUCT):', Keyword),
+
+            # vocab.private
+            # TODO: words inside vocab.private should have red names?
+            (r'(?:<PRIVATE|PRIVATE>)', Keyword.Namespace),
+
+            # strings
+            (r'"""\s+(?:.|\n)*?\s+"""', String),
+            (r'"(?:\\\\|\\"|[^"])*"', String),
+            (r'CHAR:\s+(\\[\\abfnrstv]*|\S)\s', String.Char),
+
+            # comments
+            (r'\!\s+.*$', Comment),
+            (r'#\!\s+.*$', Comment),
+
+            # boolean constants
+            (r'(t|f)\s', Name.Constant),
+
+            # numbers
+            (r'-?\d+\.\d+\s', Number.Float),
+            (r'-?\d+\s', Number.Integer),
+            (r'HEX:\s+[a-fA-F\d]+\s', Number.Hex),
+            (r'BIN:\s+[01]+\s', Number.Integer),
+            (r'OCT:\s+[0-7]+\s', Number.Oct),
+
+            # operators
+            (r'[-+/*=<>^]\s', Operator),
+
+            # keywords
+            (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s', Keyword),
+
+            # builtins
+            (builtin_kernel, Name.Builtin),
+            (builtin_assocs, Name.Builtin),
+            (builtin_combinators, Name.Builtin),
+            (builtin_math, Name.Builtin),
+            (builtin_sequences, Name.Builtin),
+            (builtin_namespaces, Name.Builtin),
+            (builtin_arrays, Name.Builtin),
+            (builtin_io, Name.Builtin),
+            (builtin_strings, Name.Builtin),
+            (builtin_vectors, Name.Builtin),
+            (builtin_continuations, Name.Builtin),
+
+            # whitespaces - usually not relevant
+            (r'\s+', Text),
+
+            # everything else is text
+            (r'\S+', Text),
+        ],
+
+        'stackeffect': [
+            (r'\s*\(', Name.Function, 'stackeffect'),
+            (r'\)', Name.Function, '#pop'),
+            (r'\-\-', Name.Function),
+            (r'\s+', Text),
+            (r'\S+', Name.Variable),
+        ],
+
+        'slots': [
+            (r'\s+', Text),
+            (r';\s', Keyword, '#pop'),
+            (r'\S+', Name.Variable),
+        ],
+
+        'import': [
+            (r';', Keyword, '#pop'),
+            (r'\S+', Name.Namespace),
+            (r'\s+', Text),
+        ],
+    }
+
+
+class IokeLexer(RegexLexer):
+    """
+    For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
+    prototype based programming language) source.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Ioke'
+    filenames = ['*.ik']
+    aliases = ['ioke', 'ik']
+    mimetypes = ['text/x-iokesrc']
+    tokens = {
+        'interpolatableText': [
+            (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
+             r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
+            (r'#{', Punctuation, 'textInterpolationRoot')
+            ],
+
+        'text': [
+            (r'(?<!\\)"', String, '#pop'),
+            include('interpolatableText'),
+            (r'[^"]', String)
+            ],
+
+        'documentation': [
+            (r'(?<!\\)"', String.Doc, '#pop'),
+            include('interpolatableText'),
+            (r'[^"]', String.Doc)
+            ],
+
+        'textInterpolationRoot': [
+            (r'}', Punctuation, '#pop'),
+            include('root')
+            ],
+
+        'slashRegexp': [
+            (r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
+            include('interpolatableText'),
+            (r'\\/', String.Regex),
+            (r'[^/]', String.Regex)
+            ],
+
+        'squareRegexp': [
+            (r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
+            include('interpolatableText'),
+            (r'\\]', String.Regex),
+            (r'[^\]]', String.Regex)
+            ],
+
+        'squareText': [
+            (r'(?<!\\)]', String, '#pop'),
+            include('interpolatableText'),
+            (r'[^\]]', String)
+            ],
+
+        'root': [
+            (r'\n', Text),
+            (r'\s+', Text),
+
+            # Comments
+            (r';(.*?)\n', Comment),
+            (r'\A#!(.*?)\n', Comment),
+
+            #Regexps
+            (r'#/', String.Regex, 'slashRegexp'),
+            (r'#r\[', String.Regex, 'squareRegexp'),
+
+            #Symbols
+            (r':[a-zA-Z0-9_!:?]+', String.Symbol),
+            (r'[a-zA-Z0-9_!:?]+:(?![a-zA-Z0-9_!?])', String.Other),
+            (r':"(\\\\|\\"|[^"])*"', String.Symbol),
+
+            #Documentation
+            (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
+             r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
+             r'|(?<=dsyntax\())[\s\n\r]*"', String.Doc, 'documentation'),
+
+            #Text
+            (r'"', String, 'text'),
+            (r'#\[', String, 'squareText'),
+
+            #Mimic
+            (r'[a-zA-Z0-9_][a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
+
+            #Assignment
+            (r'[a-zA-Z_][a-zA-Z0-9_!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))', Name.Variable),
+
+            # keywords
+            (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
+             r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
+             r'with)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            # Origin
+            (r'(eval|mimic|print|println)(?![a-zA-Z0-9!:_?])', Keyword),
+
+            # Base
+            (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
+             r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+            # Ground
+            (r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
+
+            #DefaultBehaviour Literals
+            (r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            #DefaultBehaviour Case
+            (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
+             r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            #DefaultBehaviour Reflection
+            (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
+             r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
+             r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+            #DefaultBehaviour Aspects
+            (r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            # DefaultBehaviour
+            (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+            (r'(use|destructuring)', Keyword.Reserved),
+
+            #DefaultBehavior BaseBehavior
+            (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
+             r'documentation|identity|removeCell!|undefineCell)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+            #DefaultBehavior Internal
+            (r'(internal:compositeRegexp|internal:concatenateText|'
+             r'internal:createDecimal|internal:createNumber|'
+             r'internal:createRegexp|internal:createText)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            #DefaultBehaviour Conditions
+            (r'(availableRestarts|bind|error\!|findRestart|handle|'
+             r'invokeRestart|rescue|restart|signal\!|warn\!)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            # constants
+            (r'(nil|false|true)(?![a-zA-Z0-9!:_?])', Name.Constant),
+
+            # names
+            (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
+             r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
+             r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
+             r'Conditions|Definitions|FlowControl|Internal|Literals|'
+             r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
+             r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
+             r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
+             r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
+             r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
+             r'System|Text|Tuple)(?![a-zA-Z0-9!:_?])', Name.Builtin),
+
+            # functions
+            (ur'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
+             ur'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
+             ur'(?![a-zA-Z0-9!:_?])', Name.Function),
+
+            # Numbers
+            (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
+            (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+            (r'-?\d+', Number.Integer),
+
+            (r'#\(', Punctuation),
+
+             # Operators
+            (ur'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
+             ur'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
+             ur'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
+             ur'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
+             ur'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
+             ur'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
+             ur'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
+            (r'(and|nand|or|xor|nor|return|import)(?![a-zA-Z0-9_!?])',
+             Operator),
+
+            # Punctuation
+            (r'(\`\`|\`|\'\'|\'|\.|\,|@|@@|\[|\]|\(|\)|{|})', Punctuation),
+
+            #kinds
+            (r'[A-Z][a-zA-Z0-9_!:?]*', Name.Class),
+
+            #default cellnames
+            (r'[a-z_][a-zA-Z0-9_!:?]*', Name)
+        ]
+    }
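A minimal smoke test for the new Factor lexer added above; the import path is an assumption (the hunk patches the agile-languages lexer module, here taken to be pygments.lexers.agile), while highlight() and TerminalFormatter are standard Pygments APIs:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.agile import FactorLexer   # assumed import path

    # a tiny Factor word definition, just enough to exercise the lexer
    factor_code = ": double ( x -- y ) 2 * ;\n10 double .\n"
    print highlight(factor_code, FactorLexer(), TerminalFormatter())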
--- a/MoinMoin/support/pygments/lexers/compiled.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/compiled.py	Sun Dec 25 16:43:04 2011 +0100
@@ -26,7 +26,7 @@
            'ScalaLexer', 'DylanLexer', 'OcamlLexer', 'ObjectiveCLexer',
            'FortranLexer', 'GLShaderLexer', 'PrologLexer', 'CythonLexer',
            'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer',
-           'Modula2Lexer']
+           'Modula2Lexer', 'BlitzMaxLexer']
 
 
 class CLexer(RegexLexer):
@@ -43,8 +43,12 @@
 
     tokens = {
         'whitespace': [
-            (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
-            (r'^\s*#', Comment.Preproc, 'macro'),
+            # preprocessor directives: without whitespace
+            ('^#if\s+0', Comment.Preproc, 'if0'),
+            ('^#', Comment.Preproc, 'macro'),
+            # or with whitespace
+            ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
+            ('^' + _ws + '#', Comment.Preproc, 'macro'),
             (r'^(\s*)([a-zA-Z_][a-zA-Z0-9_]*:(?!:))', bygroups(Text, Name.Label)),
             (r'\n', Text),
             (r'\s+', Text),
@@ -55,11 +59,11 @@
         'statements': [
             (r'L?"', String, 'string'),
             (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
             (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-            (r'0[0-7]+[Ll]?', Number.Oct),
-            (r'\d+[Ll]?', Number.Integer),
+            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+            (r'0[0-7]+[LlUu]*', Number.Oct),
+            (r'\d+[LlUu]*', Number.Integer),
             (r'\*/', Error),
             (r'[~!%^&*+=|?:<>/-]', Operator),
             (r'[()\[\],.]', Punctuation),
@@ -168,10 +172,17 @@
     filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx']
     mimetypes = ['text/x-c++hdr', 'text/x-c++src']
 
+    #: optional Comment or Whitespace
+    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
     tokens = {
         'root': [
-            (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
-            (r'^\s*#', Comment.Preproc, 'macro'),
+            # preprocessor directives: without whitespace
+            ('^#if\s+0', Comment.Preproc, 'if0'),
+            ('^#', Comment.Preproc, 'macro'),
+            # or with whitespace
+            ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
+            ('^' + _ws + '#', Comment.Preproc, 'macro'),
             (r'\n', Text),
             (r'\s+', Text),
             (r'\\\n', Text), # line continuation
@@ -180,11 +191,11 @@
             (r'[{}]', Punctuation),
             (r'L?"', String, 'string'),
             (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
             (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-            (r'0[0-7]+[Ll]?', Number.Oct),
-            (r'\d+[Ll]?', Number.Integer),
+            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+            (r'0[0-7]+[LlUu]*', Number.Oct),
+            (r'\d+[LlUu]*', Number.Integer),
             (r'\*/', Error),
             (r'[~!%^&*+=|?:<>/-]', Operator),
             (r'[()\[\],.;]', Punctuation),
@@ -204,6 +215,8 @@
              r'uuidof|unaligned|super|single_inheritance|raise|noop|'
              r'multiple_inheritance|m128i|m128d|m128|m64|interface|'
              r'identifier|forceinline|event|assume)\b', Keyword.Reserved),
+            # Offload C++ extensions, http://offload.codeplay.com/
+            (r'(__offload|__blockingoffload|__outer)\b', Keyword.Pseudo),
             (r'(true|false)\b', Keyword.Constant),
             (r'NULL\b', Name.Builtin),
             ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
@@ -1038,7 +1051,7 @@
 
     name = 'Dylan'
     aliases = ['dylan']
-    filenames = ['*.dylan']
+    filenames = ['*.dylan', '*.dyl']
     mimetypes = ['text/x-dylan']
 
     flags = re.DOTALL
@@ -1051,10 +1064,10 @@
              r'|open|primary|sealed|si(deways|ngleton)|slot'
              r'|v(ariable|irtual))\b', Name.Builtin),
             (r'<\w+>', Keyword.Type),
-            (r'#?"(?:\\.|[^"])+?"', String.Double),
             (r'//.*?\n', Comment.Single),
             (r'/\*[\w\W]*?\*/', Comment.Multiline),
-            (r'\'.*?\'', String.Single),
+            (r'"', String, 'string'),
+            (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
             (r'=>|\b(a(bove|fterwards)|b(e(gin|low)|y)|c(ase|leanup|reate)'
              r'|define|else(|if)|end|f(inally|or|rom)|i[fn]|l(et|ocal)|otherwise'
              r'|rename|s(elect|ignal)|t(hen|o)|u(n(less|til)|se)|wh(en|ile))\b',
@@ -1071,6 +1084,13 @@
             (r'#[a-zA-Z0-9-]+', Keyword),
             (r'[a-zA-Z0-9-]+', Name.Variable),
         ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+            (r'[^\\"\n]+', String), # all other characters
+            (r'\\\n', String), # line continuation
+            (r'\\', String), # stray backslash
+        ],
     }
 
 
@@ -1090,8 +1110,12 @@
 
     tokens = {
         'whitespace': [
-            (r'^(\s*)(#if\s+0)', bygroups(Text, Comment.Preproc), 'if0'),
-            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'macro'),
+            # preprocessor directives: without whitespace
+            ('^#if\s+0', Comment.Preproc, 'if0'),
+            ('^#', Comment.Preproc, 'macro'),
+            # or with whitespace
+            ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
+            ('^' + _ws + '#', Comment.Preproc, 'macro'),
             (r'\n', Text),
             (r'\s+', Text),
             (r'\\\n', Text), # line continuation
@@ -1323,7 +1347,7 @@
         'root': [
             (r'^#.*', Comment.Preproc),
             (r'//.*', Comment.Single),
-            (r'/\*[\w\W]*\*/', Comment.Multiline),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
             (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
              Operator),
             (r'[?:]', Operator), # quick hack for ternary
@@ -1333,7 +1357,7 @@
             (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
             (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
             (r'0[xX][0-9a-fA-F]*', Number.Hex),
-            (r'0[0-7]*', Number.Octal),
+            (r'0[0-7]*', Number.Oct),
             (r'[1-9][0-9]*', Number.Integer),
             (r'\b(attribute|const|uniform|varying|centroid|break|continue|'
              r'do|for|while|if|else|in|out|inout|float|int|void|bool|true|'
@@ -1346,12 +1370,13 @@
              r'lowp|mediump|highp|precision|input|output|hvec[234]|'
              r'[df]vec[234]|sampler[23]DRect|sampler2DRectShadow|sizeof|'
              r'cast|namespace|using)\b', Keyword), #future use
-            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name.Variable),
+            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
             (r'\.', Punctuation),
             (r'\s+', Text),
         ],
     }
 
+
 class PrologLexer(RegexLexer):
     """
     Lexer for Prolog files.
@@ -1371,7 +1396,7 @@
             (r'[0-9]+', Number),
             (r'[\[\](){}|.,;!]', Punctuation),
             (r':-|-->', Punctuation),
-            (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\U[0-9a-fA-F]{8}|'
+            (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
              r'\\[0-7]+\\|\\[\w\W]|[^"])*"', String.Double),
             (r"'(?:''|[^'])*'", String.Atom), # quoted atom
             # Needs to not be followed by an atom.
@@ -1707,7 +1732,7 @@
             (r'[:(){}\[\];,]', Punctuation),
 
             (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'0c[0-9]+', Number.Octal),
+            (r'0c[0-9]+', Number.Oct),
             (r'0b[01]+', Number.Binary),
             (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
             (r'[0-9_]+', Number.Decimal),
@@ -2363,3 +2388,95 @@
                     token = Keyword.Pervasive
             # return result
             yield index, token, value
+
+
+class BlitzMaxLexer(RegexLexer):
+    """
+    For `BlitzMax <http://blitzbasic.com>`_ source code.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'BlitzMax'
+    aliases = ['blitzmax', 'bmax']
+    filenames = ['*.bmx']
+    mimetypes = ['text/x-bmx']
+
+    bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
+    bmax_sktypes = r'@{1,2}|[!#$%]'
+    bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
+    bmax_name = r'[a-z_][a-z0-9_]*'
+    bmax_var = r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)|([ \t]*)([:])([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)' % (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
+    bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
+
+    flags = re.MULTILINE | re.IGNORECASE
+    tokens = {
+        'root': [
+            # Text
+            (r'[ \t]+', Text),
+            (r'\.\.\n', Text), # Line continuation
+            # Comments
+            (r"'.*?\n", Comment.Single),
+            (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
+            # Data types
+            ('"', String.Double, 'string'),
+            # Numbers
+            (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+            (r'\.[0-9]*(?!\.)', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r'\$[0-9a-f]+', Number.Hex),
+            (r'\%[10]+', Number), # Binary
+            # Other
+            (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
+             (bmax_vopwords), Operator),
+            (r'[(),.:\[\]]', Punctuation),
+            (r'(?:#[\w \t]*)', Name.Label),
+            (r'(?:\?[\w \t]*)', Comment.Preproc),
+            # Identifiers
+            (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
+             bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
+            (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
+             (bmax_name, bmax_name),
+             bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
+            (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
+                                 Operator, Text, Punctuation, Text,
+                                 Keyword.Type, Name.Class, Text,
+                                 Keyword.Type, Text, Punctuation)),
+            (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
+                                Text, Punctuation, Text, Keyword.Type,
+                                Name.Class, Text, Keyword.Type)),
+            (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
+             bygroups(Keyword.Reserved, Text, Name.Class)),
+            # Keywords
+            (r'\b(Ptr)\b', Keyword.Type),
+            (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
+            (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
+            (r'\b(TNullMethodException|TNullFunctionException|'
+             r'TNullObjectException|TArrayBoundsException|'
+             r'TRuntimeException)\b', Name.Exception),
+            (r'\b(Strict|SuperStrict|Module|ModuleInfo|'
+             r'End|Return|Continue|Exit|Public|Private|'
+             r'Var|VarPtr|Chr|Len|Asc|SizeOf|Sgn|Abs|Min|Max|'
+             r'New|Release|Delete|'
+             r'Incbin|IncbinPtr|IncbinLen|'
+             r'Framework|Include|Import|Extern|EndExtern|'
+             r'Function|EndFunction|'
+             r'Type|EndType|Extends|'
+             r'Method|EndMethod|'
+             r'Abstract|Final|'
+             r'If|Then|Else|ElseIf|EndIf|'
+             r'For|To|Next|Step|EachIn|'
+             r'While|Wend|EndWhile|'
+             r'Repeat|Until|Forever|'
+             r'Select|Case|Default|EndSelect|'
+             r'Try|Catch|EndTry|Throw|Assert|'
+             r'Goto|DefData|ReadData|RestoreData)\b', Keyword.Reserved),
+            # Final resolve (for variable names and such)
+            (r'(%s)' % (bmax_name), Name.Variable),
+        ],
+        'string': [
+            (r'""', String.Double),
+            (r'"C?', String.Double, '#pop'),
+            (r'[^"]+', String.Double),
+        ],
+    }
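A quick check of the relaxed preprocessor matching introduced above, where a directive may now be preceded by comments or whitespace on the same line; this is only a sketch using the standard CLexer and get_tokens() APIs, with the usual module path assumed:

    from pygments.lexers.compiled import CLexer   # assumed import path

    # the leading block comment before "#if 0" is now absorbed by the
    # whitespace-tolerant preprocessor rule
    src = "/* config */ #if 0\nint hidden;\n#endif\n"
    for token, value in CLexer().get_tokens(src):
        print token, repr(value)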
--- a/MoinMoin/support/pygments/lexers/dotnet.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/dotnet.py	Sun Dec 25 16:43:04 2011 +0100
@@ -234,28 +234,30 @@
              r'(On|Off|Binary|Text)', Keyword.Declaration),
             (r'(?<!\.)(AddHandler|Alias|'
              r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
-             r'CDec|CDbl|CInt|CLng|CObj|Const|Continue|CSByte|CShort|'
+             r'CDec|CDbl|CInt|CLng|CObj|Continue|CSByte|CShort|'
              r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
-             r'Default|Delegate|Dim|DirectCast|Do|Each|Else|ElseIf|'
-             r'End|EndIf|Enum|Erase|Error|Event|Exit|False|Finally|For|'
-             r'Friend|Function|Get|Global|GoSub|GoTo|Handles|If|'
-             r'Implements|Imports|Inherits|Interface|'
-             r'Let|Lib|Loop|Me|Module|MustInherit|'
-             r'MustOverride|MyBase|MyClass|Namespace|Narrowing|New|Next|'
+             r'Default|Delegate|DirectCast|Do|Each|Else|ElseIf|'
+             r'EndIf|Erase|Error|Event|Exit|False|Finally|For|'
+             r'Friend|Get|Global|GoSub|GoTo|Handles|If|'
+             r'Implements|Inherits|Interface|'
+             r'Let|Lib|Loop|Me|MustInherit|'
+             r'MustOverride|MyBase|MyClass|Narrowing|New|Next|'
              r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
              r'Operator|Option|Optional|Overloads|Overridable|'
-             r'Overrides|ParamArray|Partial|Private|Property|Protected|'
+             r'Overrides|ParamArray|Partial|Private|Protected|'
              r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
              r'Return|Select|Set|Shadows|Shared|Single|'
-             r'Static|Step|Stop|Structure|Sub|SyncLock|Then|'
+             r'Static|Step|Stop|SyncLock|Then|'
              r'Throw|To|True|Try|TryCast|Wend|'
              r'Using|When|While|Widening|With|WithEvents|'
              r'WriteOnly)\b', Keyword),
+            (r'(?<!\.)End\b', Keyword, 'end'),
+            (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
             (r'(?<!\.)(Function|Sub|Property)(\s+)',
              bygroups(Keyword, Text), 'funcname'),
             (r'(?<!\.)(Class|Structure|Enum)(\s+)',
              bygroups(Keyword, Text), 'classname'),
-            (r'(?<!\.)(Namespace|Imports)(\s+)',
+            (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
              bygroups(Keyword, Text), 'namespace'),
             (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
              r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
@@ -279,15 +281,25 @@
             (r'"C?', String, '#pop'),
             (r'[^"]+', String),
         ],
+        'dim': [
+            (r'[a-z_][a-z0-9_]*', Name.Variable, '#pop'),
+            (r'', Text, '#pop'),  # any other syntax
+        ],
         'funcname': [
-            (r'[a-z_][a-z0-9_]*', Name.Function, '#pop')
+            (r'[a-z_][a-z0-9_]*', Name.Function, '#pop'),
         ],
         'classname': [
-            (r'[a-z_][a-z0-9_]*', Name.Class, '#pop')
+            (r'[a-z_][a-z0-9_]*', Name.Class, '#pop'),
         ],
         'namespace': [
-            (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop')
+            (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop'),
         ],
+        'end': [
+            (r'\s+', Text),
+            (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
+             Keyword, '#pop'),
+            (r'', Text, '#pop'),
+        ]
     }
 
 class GenericAspxLexer(RegexLexer):
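The new 'dim' and 'end' states above give "Dim"/"Const" declarations and "End Xxx" blocks their own token handling in the VB.NET lexer; a small sketch of how this can be observed through the standard Pygments API (VbNetLexer is assumed to be the VB.NET lexer class defined in this module):

    from pygments.lexers.dotnet import VbNetLexer   # assumed class name

    vb_src = "Module M\n    Dim counter As Integer\nEnd Module\n"
    for token, value in VbNetLexer().get_tokens(vb_src):
        print token, repr(value)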
--- a/MoinMoin/support/pygments/lexers/functional.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/functional.py	Sun Dec 25 16:43:04 2011 +0100
@@ -381,6 +381,7 @@
         'import': [
             # Import statements
             (r'\s+', Text),
+            (r'"', String, 'string'),
             # after "funclist" state
             (r'\)', Punctuation, '#pop'),
             (r'qualified\b', Keyword),
@@ -550,7 +551,7 @@
 
     tokens = {
         'escape-sequence': [
-            (r'\\[\"\'ntbr]', String.Escape),
+            (r'\\[\\\"\'ntbr]', String.Escape),
             (r'\\[0-9]{3}', String.Escape),
             (r'\\x[0-9a-fA-F]{2}', String.Escape),
         ],
@@ -601,7 +602,7 @@
             (r'\.', Punctuation),
             (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
             (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
-            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
+            (r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
         ],
     }
 
@@ -688,7 +689,7 @@
             (r'[+-]?'+base_re+r'#[0-9a-zA-Z]+', Number.Integer),
             (r'[+-]?\d+', Number.Integer),
             (r'[+-]?\d+.\d+', Number.Float),
-            (r'[][:_@\".{}()|;,]', Punctuation),
+            (r'[]\[:_@\".{}()|;,]', Punctuation),
             (variable_re, Name.Variable),
             (atom_re, Name),
             (r'\?'+macro_re, Name.Constant),
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/support/pygments/lexers/hdl.py	Sun Dec 25 16:43:04 2011 +0100
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.hdl
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for hardware description languages.
+
+    :copyright: Copyright 2010 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import \
+     Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
+     Error
+
+__all__ = ['VerilogLexer']
+
+
+class VerilogLexer(RegexLexer):
+    """
+    For Verilog source code with preprocessor directives.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'verilog'
+    aliases = ['v']
+    filenames = ['*.v', '*.sv']
+    mimetypes = ['text/x-verilog']
+
+    #: optional Comment or Whitespace
+    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+    tokens = {
+        'root': [
+            (r'^\s*`define', Comment.Preproc, 'macro'),
+            (r'\n', Text),
+            (r'\s+', Text),
+            (r'\\\n', Text), # line continuation
+            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+            (r'[{}#@]', Punctuation),
+            (r'L?"', String, 'string'),
+            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
+            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),   # should be binary
+            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
+            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
+            (r'\'[01xz]', Number),
+            (r'\d+[Ll]?', Number.Integer),
+            (r'\*/', Error),
+            (r'[~!%^&*+=|?:<>/-]', Operator),
+            (r'[()\[\],.;\']', Punctuation),
+            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
+
+            (r'^\s*(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+            (r'^\s*(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+
+            (r'(always|always_comb|always_ff|always_latch|and|assign|automatic|'
+             r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|'
+             r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|'
+             r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|'
+             r'endtable|endtask|enum|event|final|for|force|forever|fork|function|'
+             r'generate|genvar|highz0|highz1|if|initial|inout|input|'
+             r'integer|join|large|localparam|macromodule|medium|module|'
+             r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|'
+             r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|'
+             r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|'
+             r'rtranif1|scalared|signed|small|specify|specparam|strength|'
+             r'string|strong0|strong1|struct|table|task|'
+             r'tran|tranif0|tranif1|type|typedef|'
+             r'unsigned|var|vectored|void|wait|weak0|weak1|while|'
+             r'xnor|xor)\b', Keyword),
+
+            (r'(`accelerate|`autoexpand_vectornets|`celldefine|`default_nettype|'
+             r'`else|`elsif|`endcelldefine|`endif|`endprotect|`endprotected|'
+             r'`expand_vectornets|`ifdef|`ifndef|`include|`noaccelerate|`noexpand_vectornets|'
+             r'`noremove_gatenames|`noremove_netnames|`nounconnected_drive|'
+             r'`protect|`protected|`remove_gatenames|`remove_netnames|`resetall|'
+             r'`timescale|`unconnected_drive|`undef)\b', Comment.Preproc),
+
+            (r'(\$bits|\$bitstoreal|\$bitstoshortreal|\$countdrivers|\$display|\$fclose|'
+             r'\$fdisplay|\$finish|\$floor|\$fmonitor|\$fopen|\$fstrobe|\$fwrite|'
+             r'\$getpattern|\$history|\$incsave|\$input|\$itor|\$key|\$list|\$log|'
+             r'\$monitor|\$monitoroff|\$monitoron|\$nokey|\$nolog|\$printtimescale|'
+             r'\$random|\$readmemb|\$readmemh|\$realtime|\$realtobits|\$reset|\$reset_count|'
+             r'\$reset_value|\$restart|\$rtoi|\$save|\$scale|\$scope|\$shortrealtobits|'
+             r'\$showscopes|\$showvariables|\$showvars|\$sreadmemb|\$sreadmemh|'
+             r'\$stime|\$stop|\$strobe|\$time|\$timeformat|\$write)\b', Name.Builtin),
+
+            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+            (r'(byte|shortint|int|longint|integer|time|'
+             r'bit|logic|reg|'
+             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor|'
+             r'shortreal|real|realtime)\b', Keyword.Type),
+            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
+            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+        ],
+        'classname': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
+        ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+            (r'[^\\"\n]+', String), # all other characters
+            (r'\\\n', String), # line continuation
+            (r'\\', String), # stray backslash
+        ],
+        'macro': [
+            (r'[^/\n]+', Comment.Preproc),
+            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+            (r'//.*?\n', Comment.Single, '#pop'),
+            (r'/', Comment.Preproc),
+            (r'(?<=\\)\n', Comment.Preproc),
+            (r'\n', Comment.Preproc, '#pop'),
+        ],
+        'import': [
+            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
+        ]
+    }
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in \
+            RegexLexer.get_tokens_unprocessed(self, text):
+            # Convention: mark all upper case names as constants
+            if token is Name:
+                if value.isupper():
+                    token = Name.Constant
+            yield index, token, value
+
+
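The new Verilog lexer can be exercised directly; a minimal sketch using the standard highlight()/HtmlFormatter APIs, with the module path taken from the new file added above:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.hdl import VerilogLexer

    # a trivial module header, enough to see keyword and punctuation tokens
    verilog_src = "module counter(input clk, output reg [3:0] q);\nendmodule\n"
    print highlight(verilog_src, VerilogLexer(), HtmlFormatter())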
--- a/MoinMoin/support/pygments/lexers/math.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/math.py	Sun Dec 25 16:43:04 2011 +0100
@@ -153,10 +153,10 @@
             (r'%.*$', Comment),
             (r'^\s*function', Keyword, 'deffunc'),
 
-            # from 'iskeyword' on version 7.4.0.336 (R2007a):
-            (r'(break|case|catch|classdef|continue|else|elseif|end|for|function|'
-             r'global|if|otherwise|parfor|persistent|return|switch|try|while)\b',
-             Keyword),
+            # from 'iskeyword' on version 7.11 (R2010):
+            (r'(break|case|catch|classdef|continue|else|elseif|end|enumerated|'
+             r'events|for|function|global|if|methods|otherwise|parfor|'
+             r'persistent|properties|return|spmd|switch|try|while)\b', Keyword),
 
             ("(" + "|".join(elfun+specfun+elmat) + r')\b',  Name.Builtin),
 
@@ -228,7 +228,7 @@
                 # without is showing error on same line as before...?
                 line = "\n" + line
                 token = (0, Generic.Traceback, line)
-                insertions.append(  (idx, [token,]) )
+                insertions.append((idx, [token]))
 
             else:
                 if curcode:
--- a/MoinMoin/support/pygments/lexers/other.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/other.py	Sun Dec 25 16:43:04 2011 +0100
@@ -13,7 +13,7 @@
 
 from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
      this, do_insertions
-from pygments.token import Error, Punctuation, \
+from pygments.token import Error, Punctuation, Literal, Token, \
      Text, Comment, Operator, Keyword, Name, String, Number, Generic
 from pygments.util import shebang_matches
 from pygments.lexers.web import HtmlLexer
@@ -24,7 +24,9 @@
            'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
            'GnuplotLexer', 'PovrayLexer', 'AppleScriptLexer',
            'BashSessionLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
-           'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer']
+           'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer',
+           'PostScriptLexer', 'AutohotkeyLexer', 'GoodDataCLLexer',
+           'MaqlLexer', 'ProtoBufLexer', 'HybrisLexer']
 
 line_re  = re.compile('.*?\n')
 
@@ -355,7 +357,7 @@
             (r'\\[\w\W]', String.Escape),
             (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
             (r'[\[\]{}()=]', Operator),
-            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
             (r'&&|\|\|', Operator),
         ],
         'data': [
@@ -2095,7 +2097,7 @@
 
 class GherkinLexer(RegexLexer):
     """
-    For `Gherkin <http://cukes.info/>` syntax.
+    For `Gherkin <http://github.com/aslakhellesoy/gherkin/>` syntax.
 
     *New in Pygments 1.2.*
     """
@@ -2104,82 +2106,99 @@
     filenames = ['*.feature']
     mimetypes = ['text/x-gherkin']
 
-    feature_keywords_regexp  = ur'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функционалност|Функционал|Особина|Могућност|Özellik|Właściwość|Tính năng|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
-    scenario_keywords_regexp = ur'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарио|Сценарий структураси|Сценарий|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Основа|Концепт|Контекст|Założenia|Tình huống|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
-    examples_regexp          = ur'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
-    step_keywords_regexp     = ur'^(\s*)(하지만|조건|만일|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Унда |То |Онда |Но |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Агар |А |Și |És |anrhegedig a |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Yna |Ya know how |Ya gotta |Y |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Donat |Donada |Diyelim ki |Dengan |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |But y\'all |But |Biết |Bet |BUT |Atunci |And y\'all |And |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
+    feature_keywords         = ur'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
+    feature_element_keywords = ur'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
+    examples_keywords        = ur'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
+    step_keywords            = ur'^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
 
     tokens = {
         'comments': [
-            (r'#.*$', Comment)
+            (r'#.*$', Comment),
           ],
-        'multiline_descriptions' : [
-            (step_keywords_regexp, Keyword, "#pop"),
-            include('comments'),
-            (r"(\s|.)", Name.Constant)
-          ],
-        'multiline_descriptions_on_stack' : [
-            (step_keywords_regexp, Keyword, "#pop:2"),
+        'feature_elements' : [
+            (step_keywords, Keyword, "step_content_stack"),
             include('comments'),
-            (r"(\s|.)", Name.Constant)
+            (r"(\s|.)", Name.Function),
           ],
-        'scenario_table_description': [
-            (r"\s+\|", Text, 'scenario_table_header'),
+        'feature_elements_on_stack' : [
+            (step_keywords, Keyword, "#pop:2"),
             include('comments'),
-            (r"(\s|.)", Name.Constant)
+            (r"(\s|.)", Name.Function),
           ],
-        'scenario_table_header': [
-            (r"\s+\|\s*$", Text, "#pop:2"),
-            (r"(\s+\|\s*)(#.*)$", bygroups(Text, Comment), "#pop:2"),
+        'examples_table': [
+            (r"\s+\|", Keyword, 'examples_table_header'),
             include('comments'),
-            (r"\s+\|", Text),
-            (r"[^\|]", Name.Variable)
+            (r"(\s|.)", Name.Function),
+          ],
+        'examples_table_header': [
+            (r"\s+\|\s*$", Keyword, "#pop:2"),
+            include('comments'),
+            (r"\s*\|", Keyword),
+            (r"[^\|]", Name.Variable),
           ],
         'scenario_sections_on_stack': [
-            (scenario_keywords_regexp,
-             bygroups(Text, Name.Class, Name.Class, Name.Constant),
-             "multiline_descriptions_on_stack")
-            ],
+            (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements_on_stack"),
+          ],
         'narrative': [
             include('scenario_sections_on_stack'),
-            (r"(\s|.)", Name.Builtin)
+            (r"(\s|.)", Name.Function),
           ],
         'table_vars': [
-            (r'(<[^>]*>)', bygroups(Name.Variable))
+            (r'(<[^>]+>)', Name.Variable),
+          ],
+        'numbers': [
+            (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
           ],
         'string': [
             include('table_vars'),
             (r'(\s|.)', String),
           ],
         'py_string': [
-            (r'"""', String, "#pop"),
+            (r'"""', Keyword, "#pop"),
+            include('string'),
+          ],
+          'step_content_root':[
+            (r"$", Keyword, "#pop"),
+            include('step_content'),
+          ],
+          'step_content_stack':[
+            (r"$", Keyword, "#pop:2"),
+            include('step_content'),
+          ],
+          'step_content':[
+            (r'"', Name.Function, "double_string"),
+            include('table_vars'),
+            include('numbers'),
+            include('comments'),
+            (r'(\s|.)', Name.Function),
+          ],
+          'table_content': [
+            (r"\s+\|\s*$", Keyword, "#pop"),
+            include('comments'),
+            (r"\s*\|", Keyword),
             include('string'),
           ],
         'double_string': [
-            (r'"', String, "#pop"),
+            (r'"', Name.Function, "#pop"),
             include('string'),
           ],
         'root': [
-            (r'\n', Text),
+            (r'\n', Name.Function),
             include('comments'),
-            (r'"""', String, "py_string"),
-            (r'"', String, "double_string"),
+            (r'"""', Keyword, "py_string"),
+            (r'\s+\|', Keyword, 'table_content'),
+            (r'"', Name.Function, "double_string"),
             include('table_vars'),
-            (r'@[^@\s]+', Name.Namespace),
-            (step_keywords_regexp, bygroups(Text, Keyword)),
-            (feature_keywords_regexp,
-             bygroups(Name.Class, Name.Class, Name.Constant), 'narrative'),
-            (scenario_keywords_regexp,
-             bygroups(Text, Name.Class, Name.Class, Name.Constant),
-             "multiline_descriptions"),
-            (examples_regexp,
-             bygroups(Text, Name.Class, Name.Class, Name.Constant),
-             "scenario_table_description"),
-            (r'(\s|.)', Text)
+            include('numbers'),
+            (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
+            (step_keywords, bygroups(Name.Function, Keyword), "step_content_root"),
+            (feature_keywords, bygroups(Keyword, Keyword, Name.Function), 'narrative'),
+            (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements"),
+            (examples_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "examples_table"),
+            (r'(\s|.)', Name.Function),
         ]
     }
 
-
 class AsymptoteLexer(RegexLexer):
     """
     For `Asymptote <http://asymptote.sf.net/>`_ source code.
@@ -2295,3 +2314,530 @@
            elif token is Name and value in ASYVARNAME:
                token = Name.Variable
            yield index, token, value
+
+
+class PostScriptLexer(RegexLexer):
+    """
+    Lexer for PostScript files.
+
+    The PostScript Language Reference published by Adobe at
+    <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
+    is the authority for this.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'PostScript'
+    aliases = ['postscript']
+    filenames = ['*.ps', '*.eps']
+    mimetypes = ['application/postscript']
+
+    delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
+    delimiter_end = r'(?=[%s])' % delimiter
+
+    valid_name_chars = r'[^%s]' % delimiter
+    valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+
+    tokens = {
+        'root': [
+            # All comment types
+            (r'^%!.+\n', Comment.Preproc),
+            (r'%%.*\n', Comment.Special),
+            (r'(^%.*\n){2,}', Comment.Multiline),
+            (r'%.*\n', Comment.Single),
+
+            # String literals are awkward; enter separate state.
+            (r'\(', String, 'stringliteral'),
+
+            (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation),
+
+            # Numbers
+            (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
+            # Slight abuse: use Oct to signify any explicit base system
+            (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
+             r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
+            (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
+             + delimiter_end, Number.Float),
+            (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
+
+            # References
+            (r'\/%s' % valid_name, Name.Variable),
+
+            # Names
+            (valid_name, Name.Function),      # Anything else is executed
+
+            # These keywords taken from
+            # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
+            # Is there an authoritative list anywhere that doesn't involve
+            # trawling documentation?
+
+            (r'(false|true)' + delimiter_end, Keyword.Constant),
+
+            # Conditionals / flow control
+            (r'(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)'
+             + delimiter_end, Keyword.Reserved),
+
+            ('(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|'
+             'clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|'
+             'currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|'
+             'dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|'
+             'fill|findfont|floor|get|getinterval|grestore|gsave|gt|'
+             'identmatrix|idiv|idtransform|index|invertmatrix|itransform|'
+             'length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|'
+             'pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|'
+             'rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|'
+             'save|scale|scalefont|setdash|setfont|setgray|setlinecap|'
+             'setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|'
+             'showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|'
+             'syntaxerror|transform|translate|truncate|typecheck|undefined|'
+             'undefinedfilename|undefinedresult)' + delimiter_end,
+             Name.Builtin),
+
+            (r'\s+', Text),
+        ],
+
+        'stringliteral': [
+            (r'[^\(\)\\]+', String),
+            (r'\\', String.Escape, 'escape'),
+            (r'\(', String, '#push'),
+            (r'\)', String, '#pop'),
+        ],
+
+        'escape': [
+            (r'([0-7]{3}|n|r|t|b|f|\\|\(|\)|)', String.Escape, '#pop'),
+        ],
+    }
+
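
A minimal usage sketch for the lexer above; it assumes the bundled package is importable as `pygments` and that the 'postscript' alias is registered in its lexer mapping (snippet text and variable names are illustrative only):

from pygments.lexers import get_lexer_by_name

ps = "/msg (Hello) def  % define a name\nmsg show\n"
lexer = get_lexer_by_name('postscript')
for tokentype, value in lexer.get_tokens(ps):
    # '/msg' is yielded as Name.Variable, '(Hello)' as String,
    # 'def' and 'show' as Name.Builtin, the '%' line as Comment.Single
    print tokentype, repr(value)
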
+
+class AutohotkeyLexer(RegexLexer):
+    """
+    For `autohotkey <http://www.autohotkey.com/>`_ source code.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'autohotkey'
+    aliases = ['ahk']
+    filenames = ['*.ahk', '*.ahkl']
+    mimetypes = ['text/x-autohotkey']
+
+    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            (r'^\(', String, 'continuation'),
+            include('comments'),
+            (r'(^\s*)(\w+)(\s*)(=)',
+             bygroups(Text.Whitespace, Name, Text.Whitespace, Operator),
+             'command'),
+            (r'([\w#@$?\[\]]+)(\s*)(\()',
+             bygroups(Name.Function, Text.Whitespace, Punctuation),
+             'parameters'),
+            include('directives'),
+            include('labels'),
+            include('commands'),
+            include('expressions'),
+            include('numbers'),
+            include('literals'),
+            include('keynames'),
+            include('keywords'),
+        ],
+        'command': [
+            include('comments'),
+            include('whitespace'),
+            (r'^\(', String, 'continuation'),
+            (r'[^\n]*?(?=;*|$)', String, '#pop'),
+            include('numbers'),
+            include('literals'),
+        ],
+
+        'expressions': [
+            include('comments'),
+            include('whitespace'),
+            include('numbers'),
+            include('literals'),
+            (r'([]\w#@$?[]+)(\s*)(\()',
+             bygroups(Name.Function, Text.Whitespace, Punctuation),
+             'parameters'),
+            (r'A_\w+', Name.Builtin),
+            (r'%[]\w#@$?[]+?%', Name.Variable),
+            # blocks: if, else, function definitions
+            (r'{', Punctuation, 'block'),
+            # parameters in function calls
+            ],
+        'literals': [
+            (r'"', String, 'string'),
+            (r'A_\w+', Name.Builtin),
+            (r'%[]\w#@$?[]+?%', Name.Variable),
+            (r'[-~!%^&*+|?:<>/=]=?', Operator, 'expressions'),
+            (r'==', Operator, 'expressions'),
+            ('[{()},.%#`;]', Punctuation),
+            (r'\\', Punctuation),
+            include('keywords'),
+            (r'\w+', Text),
+            ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'""|`.', String.Escape),
+            (r'[^\`"\n]+', String), # all other characters
+        ],
+        'block': [
+            include('root'),
+            ('{', Punctuation, '#push'),
+            ('}', Punctuation, '#pop'),
+        ],
+        'parameters': [
+            (r'\)', Punctuation, '#pop'),
+            (r'\(', Punctuation, '#push'),
+            include('numbers'),
+            include('literals'),
+            include('whitespace'),
+        ],
+        'keywords': [
+            (r'(static|global|local)\b', Keyword.Type),
+            (r'(if|else|and|or)\b', Keyword.Reserved),
+            ],
+        'directives': [
+            (r'#\w+?\s', Keyword),
+            ],
+        'labels': [
+            # hotkeys and labels
+            # technically, hotkey names are limited to named keys and buttons
+            (r'(^\s*)([^:\s]+?:{1,2})', bygroups(Text.Whitespace, Name.Label)),
+             # hotstrings
+            (r'(^\s*)(::[]\w#@$?[]+?::)', bygroups(Text.Whitespace, Name.Label)),
+            ],
+        'comments': [
+            (r'^;+.*?$', Comment.Single),  # beginning of line comments
+            (r'(?<=\s);+.*?$', Comment.Single),    # end of line comments
+            (r'^/\*.*?\n\*/', Comment.Multiline),
+            (r'(?<!\n)/\*.*?\n\*/', Error),  # must be at start of line
+            ],
+        'whitespace': [
+            (r'[ \t]+', Text.Whitespace),
+            ],
+        'numbers': [
+            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eE][+-]?[0-9]+', Number.Float),
+            (r'0[0-7]+', Number.Oct),
+            (r'0[xX][a-fA-F0-9]+', Number.Hex),
+            (r'\d+L', Number.Integer.Long),
+            (r'\d+', Number.Integer)
+        ],
+        'continuation': [
+            (r'\n\)', Punctuation, '#pop'),
+            (r'\s[^\n\)]+', String),
+        ],
+        'keynames': [
+            (r'\[[^\]]+\]', Keyword, 'keynames')
+        ],
+        'commands': [
+            (r'(autotrim|blockinput|break|click|'
+             r'clipwait|continue|control|'
+             r'controlclick|controlfocus|controlget|'
+             r'controlgetfocus|controlgetpos|controlgettext|'
+             r'controlmove|controlsend|controlsendraw|'
+             r'controlsettext|coordmode|critical|'
+             r'detecthiddentext|detecthiddenwindows|'
+             r'dllcall|drive|'
+             r'driveget|drivespacefree|'
+             r'else|envadd|envdiv|'
+             r'envget|envmult|envset|'
+             r'envsub|envupdate|exit|'
+             r'exitapp|fileappend|filecopy|'
+             r'filecopydir|filecreatedir|filecreateshortcut|'
+             r'filedelete|filegetattrib|filegetshortcut|'
+             r'filegetsize|filegettime|filegetversion|'
+             r'fileinstall|filemove|filemovedir|'
+             r'fileread|filereadline|filerecycle|'
+             r'filerecycleempty|fileremovedir|fileselectfile|'
+             r'fileselectfolder|filesetattrib|filesettime|'
+             r'formattime|gosub|'
+             r'goto|groupactivate|groupadd|'
+             r'groupclose|groupdeactivate|gui|'
+             r'guicontrol|guicontrolget|hotkey|'
+             r'ifexist|ifgreater|ifgreaterorequal|'
+             r'ifinstring|ifless|iflessorequal|'
+             r'ifmsgbox|ifnotequal|ifnotexist|'
+             r'ifnotinstring|ifwinactive|ifwinexist|'
+             r'ifwinnotactive|ifwinnotexist|imagesearch|'
+             r'inidelete|iniread|iniwrite|'
+             r'input|inputbox|keyhistory|'
+             r'keywait|listhotkeys|listlines|'
+             r'listvars|loop|'
+             r'menu|mouseclick|mouseclickdrag|'
+             r'mousegetpos|mousemove|msgbox|'
+             r'onmessage|onexit|outputdebug|'
+             r'pixelgetcolor|pixelsearch|postmessage|'
+             r'process|progress|random|'
+             r'regexmatch|regexreplace|registercallback|'
+             r'regdelete|regread|regwrite|'
+             r'reload|repeat|return|'
+             r'run|runas|runwait|'
+             r'send|sendevent|sendinput|'
+             r'sendmessage|sendmode|sendplay|'
+             r'sendraw|setbatchlines|setcapslockstate|'
+             r'setcontroldelay|setdefaultmousespeed|setenv|'
+             r'setformat|setkeydelay|setmousedelay|'
+             r'setnumlockstate|setscrolllockstate|'
+             r'setstorecapslockmode|'
+             r'settimer|settitlematchmode|setwindelay|'
+             r'setworkingdir|shutdown|sleep|'
+             r'sort|soundbeep|soundget|'
+             r'soundgetwavevolume|soundplay|soundset|'
+             r'soundsetwavevolume|splashimage|splashtextoff|'
+             r'splashtexton|splitpath|statusbargettext|'
+             r'statusbarwait|stringcasesense|stringgetpos|'
+             r'stringleft|stringlen|stringlower|'
+             r'stringmid|stringreplace|stringright|'
+             r'stringsplit|stringtrimleft|stringtrimright|'
+             r'stringupper|suspend|sysget|'
+             r'thread|tooltip|transform|'
+             r'traytip|urldownloadtofile|while|'
+             r'varsetcapacity|'
+             r'winactivate|winactivatebottom|winclose|'
+             r'winget|wingetactivestats|wingetactivetitle|'
+             r'wingetclass|wingetpos|wingettext|'
+             r'wingettitle|winhide|winkill|'
+             r'winmaximize|winmenuselectitem|winminimize|'
+             r'winminimizeall|winminimizeallundo|winmove|'
+             r'winrestore|winset|winsettitle|'
+             r'winshow|winwait|winwaitactive|'
+             r'winwaitclose|winwaitnotactive|'
+             r'true|false|NULL)\b', Keyword, 'command'),
+            ],
+
+        }
+
+class MaqlLexer(RegexLexer):
+    """
+    Lexer for `GoodData MAQL <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
+    scripts.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'MAQL'
+    aliases = ['maql']
+    filenames = ['*.maql']
+    mimetypes = ['text/x-gooddata-maql','application/x-gooddata-maql']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            # IDENTITY
+            (r'IDENTIFIER\b', Name.Builtin),
+            # IDENTIFIER
+            (r'\{[^}]+\}', Name.Variable),
+            # NUMBER
+            (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
+            # STRING
+            (r'"', Literal.String, 'string-literal'),
+            #  RELATION
+            (r'\<\>|\!\=', Operator),
+            (r'\=|\>\=|\>|\<\=|\<', Operator),
+            # :=
+            (r'\:\=', Operator),
+            # OBJECT
+            (r'\[[^]]+\]', Name.Variable.Class),
+            # keywords
+            (r'(DIMENSIONS?|BOTTOM|METRIC|COUNT|OTHER|FACT|WITH|TOP|OR|'
+             r'ATTRIBUTE|CREATE|PARENT|FALSE|ROWS?|FROM|ALL|AS|PF|'
+             r'COLUMNS?|DEFINE|REPORT|LIMIT|TABLE|LIKE|AND|BY|'
+             r'BETWEEN|EXCEPT|SELECT|MATCH|WHERE|TRUE|FOR|IN|'
+             r'WITHOUT|FILTER|ALIAS|ORDER|FACT|WHEN|NOT|ON|'
+             r'KEYS|KEY|FULLSET|PRIMARY|LABELS|LABEL|VISUAL|'
+             r'TITLE|DESCRIPTION|FOLDER|ALTER|DROP|ADD|DATASET|'
+             r'DATATYPE|INT|BIGINT|DOUBLE|DATE|VARCHAR|DECIMAL|'
+             r'SYNCHRONIZE|TYPE|DEFAULT|ORDER|ASC|DESC|HYPERLINK|'
+             r'INCLUDE|TEMPLATE|MODIFY)\b', Keyword),
+            # FUNCNAME
+            (r'[a-zA-Z]\w*\b', Name.Function),
+            # Comments
+            (r'#.*', Comment.Single),
+            # Punctuation
+            (r'[,;\(\)]', Token.Punctuation),
+            # Space is not significant
+            (r'\s+', Text)
+        ],
+        'string-literal': [
+            (r'\\[tnrfbae"\\]', String.Escape),
+            (r'"', Literal.String, '#pop'),
+            (r'[^\\"]+', Literal.String)
+        ],
+    }
+
+
+class GoodDataCLLexer(RegexLexer):
+    """
+    Lexer for `GoodData-CL <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt>`_
+    script files.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'GoodData-CL'
+    aliases = ['gooddata-cl']
+    filenames = ['*.gdc']
+    mimetypes = ['text/x-gooddata-cl']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            # Comments
+            (r'#.*', Comment.Single),
+            # Function call
+            (r'[a-zA-Z]\w*', Name.Function),
+            # Argument list
+            (r'\(', Token.Punctuation, 'args-list'),
+            # Punctuation
+            (r';', Token.Punctuation),
+            # Space is not significant
+            (r'\s+', Text)
+        ],
+        'args-list': [
+            (r'\)', Token.Punctuation, '#pop'),
+            (r',', Token.Punctuation),
+            (r'[a-zA-Z]\w*', Name.Variable),
+            (r'=', Operator),
+            (r'"', Literal.String, 'string-literal'),
+            (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
+            # Space is not significant
+            (r'\s', Text)
+        ],
+        'string-literal': [
+            (r'\\[tnrfbae"\\]', String.Escape),
+            (r'"', Literal.String, '#pop'),
+            (r'[^\\"]+', Literal.String)
+        ]
+    }
+
+
+class ProtoBufLexer(RegexLexer):
+    """
+    Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
+    definition files.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Protocol Buffer'
+    aliases = ['protobuf']
+    filenames = ['*.proto']
+
+    tokens = {
+        'root': [
+            (r'[ \t]+', Text),
+            (r'[,;{}\[\]\(\)]', Punctuation),
+            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+            (r'\b(import|option|optional|required|repeated|default|packed|'
+             r'ctype|extensions|to|max|rpc|returns)\b', Keyword),
+            (r'(int32|int64|uint32|uint64|sint32|sint64|'
+             r'fixed32|fixed64|sfixed32|sfixed64|'
+             r'float|double|bool|string|bytes)\b', Keyword.Type),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
+            (r'(message|extend)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'message'),
+            (r'(enum|group|service)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'type'),
+            (r'\".*\"', String),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+            (r'(\-?(inf|nan))', Number.Float),
+            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+            (r'0[0-7]+[LlUu]*', Number.Oct),
+            (r'\d+[LlUu]*', Number.Integer),
+            (r'[+-=]', Operator),
+            (r'([a-zA-Z_][a-zA-Z0-9_\.]*)([ \t]*)(=)',
+             bygroups(Name.Attribute, Text, Operator)),
+            ('[a-zA-Z_][a-zA-Z0-9_\.]*', Name),
+        ],
+        'package': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Namespace, '#pop')
+        ],
+        'message': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+        ],
+        'type': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name, '#pop')
+        ],
+    }
+
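
A short, hedged example of running the lexer end to end; it assumes the 'protobuf' alias above is registered in the lexer mapping and that the bundled package is importable as `pygments` (the .proto snippet is made up for illustration):

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

proto = '''\
message SearchRequest {
  required string query = 1;
  optional int32 page_number = 2;
}
'''
# 'message' enters the 'message' state, so SearchRequest is tagged Name.Class;
# field modifiers and scalar types come out as Keyword / Keyword.Type
print highlight(proto, get_lexer_by_name('protobuf'), TerminalFormatter())
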
+
+class HybrisLexer(RegexLexer):
+    """
+    For `Hybris <http://www.hybris-lang.org>`_ source code.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Hybris'
+    aliases = ['hybris', 'hy']
+    filenames = ['*.hy', '*.hyb']
+    mimetypes = ['text/x-hybris', 'application/x-hybris']
+
+    flags = re.MULTILINE | re.DOTALL
+
+    tokens = {
+        'root': [
+            # method names
+            (r'^(\s*(?:function|method|operator\s+)+?)'
+             r'([a-zA-Z_][a-zA-Z0-9_]*)'
+             r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
+            (r'[^\S\n]+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+            (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
+             r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
+            (r'(extends|private|protected|public|static|throws|function|method|'
+             r'operator)\b', Keyword.Declaration),
+            (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
+             r'__INC_PATH__)\b', Keyword.Constant),
+            (r'(class|struct)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'class'),
+            (r'(import|include)(\s+)',
+             bygroups(Keyword.Namespace, Text), 'import'),
+            (r'(gc_collect|gc_mm_items|gc_mm_usage|gc_collect_threshold|'
+             r'urlencode|urldecode|base64encode|base64decode|sha1|crc32|sha2|'
+             r'md5|md5_file|acos|asin|atan|atan2|ceil|cos|cosh|exp|fabs|floor|'
+             r'fmod|log|log10|pow|sin|sinh|sqrt|tan|tanh|isint|isfloat|ischar|'
+             r'isstring|isarray|ismap|isalias|typeof|sizeof|toint|tostring|'
+             r'fromxml|toxml|binary|pack|load|eval|var_names|var_values|'
+             r'user_functions|dyn_functions|methods|call|call_method|mknod|'
+             r'mkfifo|mount|umount2|umount|ticks|usleep|sleep|time|strtime|'
+             r'strdate|dllopen|dlllink|dllcall|dllcall_argv|dllclose|env|exec|'
+             r'fork|getpid|wait|popen|pclose|exit|kill|pthread_create|'
+             r'pthread_create_argv|pthread_exit|pthread_join|pthread_kill|'
+             r'smtp_send|http_get|http_post|http_download|socket|bind|listen|'
+             r'accept|getsockname|getpeername|settimeout|connect|server|recv|'
+             r'send|close|print|println|printf|input|readline|serial_open|'
+             r'serial_fcntl|serial_get_attr|serial_get_ispeed|serial_get_ospeed|'
+             r'serial_set_attr|serial_set_ispeed|serial_set_ospeed|serial_write|'
+             r'serial_read|serial_close|xml_load|xml_parse|fopen|fseek|ftell|'
+             r'fsize|fread|fwrite|fgets|fclose|file|readdir|pcre_replace|size|'
+             r'pop|unmap|has|keys|values|length|find|substr|replace|split|trim|'
+             r'remove|contains|join)\b', Name.Builtin),
+            (r'(MethodReference|Runner|Dll|Thread|Pipe|Process|Runnable|'
+             r'CGI|ClientSocket|Socket|ServerSocket|File|Console|Directory|'
+             r'Exception)\b', Keyword.Type),
+            (r'"(\\\\|\\"|[^"])*"', String),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
+             bygroups(Operator, Name.Attribute)),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
+            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'0x[0-9a-f]+', Number.Hex),
+            (r'[0-9]+L?', Number.Integer),
+            (r'\n', Text),
+        ],
+        'class': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+        ],
+        'import': [
+            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+        ],
+    }
--- a/MoinMoin/support/pygments/lexers/templates.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/templates.py	Sun Dec 25 16:43:04 2011 +0100
@@ -13,7 +13,7 @@
 
 from pygments.lexers.web import \
      PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
-from pygments.lexers.agile import PythonLexer
+from pygments.lexers.agile import PythonLexer, PerlLexer
 from pygments.lexers.compiled import JavaLexer
 from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
      include, using, this
@@ -30,12 +30,14 @@
            'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
            'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
            'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
-           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MakoLexer',
+           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
            'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
            'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
            'CheetahXmlLexer', 'CheetahJavascriptLexer',
            'EvoqueLexer', 'EvoqueHtmlLexer', 'EvoqueXmlLexer',
-           'ColdfusionLexer', 'ColdfusionHtmlLexer']
+           'ColdfusionLexer', 'ColdfusionHtmlLexer',
+           'VelocityLexer', 'VelocityHtmlLexer', 'VelocityXmlLexer',
+           'SspLexer']
 
 
 class ErbLexer(Lexer):
@@ -188,6 +190,121 @@
         return rv
 
 
+class VelocityLexer(RegexLexer):
+    """
+    Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
+
+    Just highlights Velocity directives and variable references; other
+    data is left untouched by the lexer.
+    """
+
+    name = 'Velocity'
+    aliases = ['velocity']
+    filenames = ['*.vm','*.fhtml']
+
+    flags = re.MULTILINE | re.DOTALL
+
+    identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+    tokens = {
+        'root': [
+            (r'[^{#$]+', Other),
+            (r'(#)(\*.*?\*)(#)',
+             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
+            (r'(##)(.*?$)',
+             bygroups(Comment.Preproc, Comment)),
+            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
+             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
+             'directiveparams'),
+            (r'(#\{?)(' + identifier + r')(\}|\b)',
+             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
+            (r'\$\{?', Punctuation, 'variable')
+        ],
+        'variable': [
+            (identifier, Name.Variable),
+            (r'\(', Punctuation, 'funcparams'),
+            (r'(\.)(' + identifier + r')', bygroups(Punctuation, Name.Variable), '#push'),
+            (r'\}', Punctuation, '#pop'),
+            (r'', Other, '#pop')
+        ],
+        'directiveparams': [
+            (r'(&&|\|\||==?|!=?|[-<>+*%&\|\^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b', Operator),
+            (r'\[', Operator, 'rangeoperator'),
+            (r'\b' + identifier + r'\b', Name.Function),
+            include('funcparams')
+        ],
+        'rangeoperator': [
+            (r'\.\.', Operator),
+            include('funcparams'),
+            (r'\]', Operator, '#pop')
+        ],
+        'funcparams': [
+            (r'\$\{?', Punctuation, 'variable'),
+            (r'\s+', Text),
+            (r',', Punctuation),
+            (r'"(\\\\|\\"|[^"])*"', String.Double),
+            (r"'(\\\\|\\'|[^'])*'", String.Single),
+            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+            (r"\b[0-9]+\b", Number),
+            (r'(true|false|null)\b', Keyword.Constant),
+            (r'\(', Punctuation, '#push'),
+            (r'\)', Punctuation, '#pop')
+        ]
+    }
+
+    def analyse_text(text):
+        rv = 0.0
+        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
+            rv += 0.25
+        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
+            rv += 0.15
+        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
+            rv += 0.15
+        if re.search(r'\$\{?[a-zA-Z_][a-zA-Z0-9_]*(\([^)]*\))?(\.[a-zA-Z0-9_]+(\([^)]*\))?)*\}?', text):
+            rv += 0.01
+        return rv
+
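
The analyse_text heuristic above scores a candidate text: 0.25 for a #macro ... #end pair, 0.15 each for #if ... #end and #foreach ... #end, and 0.01 for a $reference. A rough sketch of exercising it directly, with the module path as patched in this file and assuming the bundled package imports as `pygments`:

from pygments.lexers.templates import VelocityLexer

tpl = "#if($user)Hello $user.name#end"
# 0.15 for the #if ... #end pair plus 0.01 for the $reference
print VelocityLexer.analyse_text(tpl)   # prints roughly 0.16
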
+
+class VelocityHtmlLexer(DelegatingLexer):
+    """
+    Subclass of the `VelocityLexer` that highlights unlexed data
+    with the `HtmlLexer`.
+
+    """
+
+    name = 'HTML+Velocity'
+    aliases = ['html+velocity']
+    alias_filenames = ['*.html','*.fhtml']
+    mimetypes = ['text/html+velocity']
+
+    def __init__(self, **options):
+        super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
+                                              **options)
+
+
+class VelocityXmlLexer(DelegatingLexer):
+    """
+    Subclass of the `VelocityLexer` that highlights unlexed data
+    with the `XmlLexer`.
+
+    """
+
+    name = 'XML+Velocity'
+    aliases = ['xml+velocity']
+    alias_filenames = ['*.xml','*.vm']
+    mimetypes = ['application/xml+velocity']
+
+    def __init__(self, **options):
+        super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
+                                               **options)
+
+    def analyse_text(text):
+        rv = VelocityLexer.analyse_text(text) - 0.01
+        if looks_like_xml(text):
+            rv += 0.5
+        return rv
+
+
 class DjangoLexer(RegexLexer):
     """
     Generic `django <http://www.djangoproject.com/documentation/templates/>`_
@@ -239,7 +356,7 @@
              r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
              Keyword),
             (r'(loop|block|super|forloop)\b', Name.Builtin),
-            (r'[a-zA-Z][a-zA-Z0-9_]*', Name.Variable),
+            (r'[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable),
             (r'\.[a-zA-Z0-9_]+', Name.Variable),
             (r':?"(\\\\|\\"|[^"])*"', String.Double),
             (r":?'(\\\\|\\'|[^'])*'", String.Single),
@@ -389,6 +506,61 @@
                                              **options)
 
 
+class MasonLexer(RegexLexer):
+    """
+    Generic `mason templates`_ lexer, adapted from the Myghty lexer. Code that
+    isn't Mason markup is highlighted as HTML.
+
+    .. _mason templates: http://www.masonhq.com/
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Mason'
+    aliases = ['mason']
+    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
+    mimetypes = ['application/x-mason']
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'(<%doc>)(.*?)(</%doc>)(?s)',
+             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+            (r'(<%(def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, None, Text, Name.Function, Name.Tag,
+                      using(this), Name.Tag)),
+            (r'(<%(\w+))(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, None, Name.Function, Name.Tag,
+                      using(PerlLexer), Name.Tag)),
+            (r'(<&[^|])(.*?)(,.*?)?(&>)(?s)',
+             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+            (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
+             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+            (r'</&>', Name.Tag),
+            (r'(<%!?)(.*?)(%>)(?s)',
+             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
+            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
+            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
+             bygroups(Name.Tag, using(PerlLexer), Other)),
+            (r"""(?sx)
+                 (.+?)               # anything, followed by:
+                 (?:
+                  (?<=\n)(?=[%#]) |  # an eval or comment line
+                  (?=</?[%&]) |      # a substitution or block or
+                                     # call start or end
+                                     # - don't consume
+                  (\\\n) |           # an escaped newline
+                  \Z                 # end of string
+                 )""", bygroups(using(HtmlLexer), Operator)),
+        ]
+    }
+
+    def analyse_text(text):
+        rv = 0.0
+        if re.search('<&', text) is not None:
+            rv = 1.0
+        return rv
+
+
 class MakoLexer(RegexLexer):
     """
     Generic `mako templates`_ lexer. Code that isn't Mako
@@ -1376,7 +1548,7 @@
     """
     Coldfusion markup in html
     """
-    name = 'Coldufsion HTML'
+    name = 'Coldfusion HTML'
     aliases = ['cfm']
     filenames = ['*.cfm', '*.cfml', '*.cfc']
     mimetypes = ['application/x-coldfusion']
@@ -1385,3 +1557,27 @@
         super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
                                                   **options)
 
+
+class SspLexer(DelegatingLexer):
+    """
+    Lexer for Scalate Server Pages.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Scalate Server Page'
+    aliases = ['ssp']
+    filenames = ['*.ssp']
+    mimetypes = ['application/x-ssp']
+
+    def __init__(self, **options):
+        super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
+
+    def analyse_text(text):
+        rv = 0.0
+        if re.search('val \w+\s*:', text):
+            rv += 0.6
+        if looks_like_xml(text):
+            rv += 0.2
+        if '<%' in text and '%>' in text:
+            rv += 0.1
+        return rv
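
SspLexer is a DelegatingLexer: the JSP-style root lexer consumes the <% ... %> parts, and whatever it leaves as Other is re-lexed as XML. A small sketch of picking it up by filename, assuming the '*.ssp' pattern above is wired into the lexer mapping of this bundled Pygments and that `pygments` is importable (file name and markup are illustrative):

from pygments import highlight
from pygments.lexers import get_lexer_for_filename
from pygments.formatters import HtmlFormatter

ssp = '<p>Hello, <%= username %>!</p>\n'
lexer = get_lexer_for_filename('index.ssp')   # expected to resolve to the SspLexer above
print highlight(ssp, lexer, HtmlFormatter())
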
--- a/MoinMoin/support/pygments/lexers/text.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/text.py	Sun Dec 25 16:43:04 2011 +0100
@@ -19,7 +19,7 @@
 from pygments.util import get_bool_opt
 from pygments.lexers.other import BashLexer
 
-__all__ = ['IniLexer', 'SourcesListLexer', 'BaseMakefileLexer',
+__all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer',
            'MakefileLexer', 'DiffLexer', 'IrcLogsLexer', 'TexLexer',
            'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer',
            'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
@@ -34,7 +34,7 @@
 
     name = 'INI'
     aliases = ['ini', 'cfg']
-    filenames = ['*.ini', '*.cfg', '*.properties']
+    filenames = ['*.ini', '*.cfg']
     mimetypes = ['text/x-ini']
 
     tokens = {
@@ -42,7 +42,7 @@
             (r'\s+', Text),
             (r'[;#].*?$', Comment),
             (r'\[.*?\]$', Keyword),
-            (r'(.*?)([ \t]*)(=)([ \t]*)(.*?)$',
+            (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
              bygroups(Name.Attribute, Text, Operator, Text, String))
         ]
     }
@@ -54,6 +54,28 @@
         return text[0] == '[' and text[npos-1] == ']'
 
 
+class PropertiesLexer(RegexLexer):
+    """
+    Lexer for configuration files in Java's properties format.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Properties'
+    aliases = ['properties']
+    filenames = ['*.properties']
+    mimetypes = ['text/x-java-properties']
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'(?:[;#]|//).*$', Comment),
+            (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
+             bygroups(Name.Attribute, Text, Operator, Text, String)),
+        ],
+    }
+
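
The key/value rule above glues backslash-continued lines into one value by way of the (?<=\\)\n look-behind. A quick sketch of what that yields, again assuming the 'properties' alias is registered and `pygments` is importable:

from pygments.lexers import get_lexer_by_name

props = "greeting = Hello, \\\n    world\n# a comment\nkey: value\n"
for tokentype, value in get_lexer_by_name('properties').get_tokens(props):
    # 'greeting' and 'key' come out as Name.Attribute; the continued
    # "Hello, \" + "    world" text stays together in a single String token
    print tokentype, repr(value)
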
+
 class SourcesListLexer(RegexLexer):
     """
     Lexer that highlights debian sources.list files.
@@ -830,7 +852,7 @@
             (r'^#:\s.*?$', Keyword.Declaration),
             #(r'^#$', Comment),
             (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
-            (r'^(")([\w-]*:)(.*")$',
+            (r'^(")([A-Za-z-]+:)(.*")$',
              bygroups(String, Name.Property, String)),
             (r'^".*"$', String),
             (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$',
@@ -1524,7 +1546,7 @@
     """
     name = 'CMake'
     aliases = ['cmake']
-    filenames = ['*.cmake']
+    filenames = ['*.cmake', 'CMakeLists.txt']
     mimetypes = ['text/x-cmake']
 
     tokens = {
--- a/MoinMoin/support/pygments/lexers/web.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/web.py	Sun Dec 25 16:43:04 2011 +0100
@@ -10,20 +10,23 @@
 """
 
 import re
+import copy
 
 from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
      include, this
-from pygments.token import \
-     Text, Comment, Operator, Keyword, Name, String, Number, Other, Punctuation
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+     Number, Other, Punctuation, Literal
 from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
                           html_doctype_matches
 from pygments.lexers.agile import RubyLexer
+from pygments.lexers.compiled import ScalaLexer
 
 
 __all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'CssLexer',
            'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
-           'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer',
-           'ObjectiveJLexer', 'CoffeeScriptLexer']
+           'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
+           'ObjectiveJLexer', 'CoffeeScriptLexer', 'DuelLexer', 'ScamlLexer',
+           'JadeLexer', 'XQueryLexer']
 
 
 class JavascriptLexer(RegexLexer):
@@ -34,7 +37,8 @@
     name = 'JavaScript'
     aliases = ['js', 'javascript']
     filenames = ['*.js']
-    mimetypes = ['application/x-javascript', 'text/x-javascript', 'text/javascript']
+    mimetypes = ['application/javascript', 'application/x-javascript',
+                 'text/x-javascript', 'text/javascript']
 
     flags = re.DOTALL
     tokens = {
@@ -378,7 +382,7 @@
             (r'\!important', Comment.Preproc),
             (r'/\*(?:.|\n)*?\*/', Comment),
             (r'\#[a-zA-Z0-9]{1,6}', Number),
-            (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex)', Number),
+            (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex|s)\b', Number),
             (r'-?[0-9]+', Number),
             (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
             (r'[\[\]();]+', Punctuation),
@@ -748,8 +752,11 @@
             (r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
             (r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
             (r'[\\a-zA-Z_][\\a-zA-Z0-9_]*', Name.Other),
-            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
-             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eE][+-]?[0-9]+', Number.Float),
+            (r'0[0-7]+', Number.Oct),
+            (r'0[xX][a-fA-F0-9]+', Number.Hex),
+            (r'\d+', Number.Integer),
             (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
             (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
             (r'"', String.Double, 'string'),
@@ -763,7 +770,7 @@
         'string': [
             (r'"', String.Double, '#pop'),
             (r'[^{$"\\]+', String.Double),
-            (r'\\([nrt\"$]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
+            (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
             (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
              String.Interpol),
             (r'(\{\$\{)(.*?)(\}\})',
@@ -1217,6 +1224,10 @@
     # which is ignored and used to wrap long lines.
     # To accomodate this, use this custom faux dot instead.
     _dot = r'(?: \|\n(?=.* \|)|.)'
+
+    # In certain places, a comma at the end of the line
+    # allows line wrapping as well.
+    _comma_dot = r'(?:,\s*\n|' + _dot + ')'
     tokens = {
         'root': [
             (r'[ \t]*\n', Text),
@@ -1230,7 +1241,7 @@
 
         'eval-or-plain': [
             (r'[&!]?==', Punctuation, 'plain'),
-            (r'([&!]?[=~])(' + _dot + '*\n)',
+            (r'([&!]?[=~])(' + _comma_dot + '*\n)',
              bygroups(Punctuation, using(RubyLexer)),
              'root'),
             (r'', Text, 'plain'),
@@ -1247,7 +1258,7 @@
              '#pop'),
             (r'-#' + _dot + '*\n', _starts_block(Comment.Preproc,
                                                  'haml-comment-block'), '#pop'),
-            (r'(-)(' + _dot + '*\n)',
+            (r'(-)(' + _comma_dot + '*\n)',
              bygroups(Punctuation, using(RubyLexer)),
              '#pop'),
             (r':' + _dot + '*\n', _starts_block(Name.Decorator, 'filter-block'),
@@ -1307,6 +1318,172 @@
     }
 
 
+common_sass_tokens = {
+    'value': [
+        (r'[ \t]+', Text),
+        (r'[!$][\w-]+', Name.Variable),
+        (r'url\(', String.Other, 'string-url'),
+        (r'[a-z_-][\w-]*(?=\()', Name.Function),
+        (r'(azimuth|background-attachment|background-color|'
+         r'background-image|background-position|background-repeat|'
+         r'background|border-bottom-color|border-bottom-style|'
+         r'border-bottom-width|border-left-color|border-left-style|'
+         r'border-left-width|border-right|border-right-color|'
+         r'border-right-style|border-right-width|border-top-color|'
+         r'border-top-style|border-top-width|border-bottom|'
+         r'border-collapse|border-left|border-width|border-color|'
+         r'border-spacing|border-style|border-top|border|caption-side|'
+         r'clear|clip|color|content|counter-increment|counter-reset|'
+         r'cue-after|cue-before|cue|cursor|direction|display|'
+         r'elevation|empty-cells|float|font-family|font-size|'
+         r'font-size-adjust|font-stretch|font-style|font-variant|'
+         r'font-weight|font|height|letter-spacing|line-height|'
+         r'list-style-type|list-style-image|list-style-position|'
+         r'list-style|margin-bottom|margin-left|margin-right|'
+         r'margin-top|margin|marker-offset|marks|max-height|max-width|'
+         r'min-height|min-width|opacity|orphans|outline|outline-color|'
+         r'outline-style|outline-width|overflow|padding-bottom|'
+         r'padding-left|padding-right|padding-top|padding|page|'
+         r'page-break-after|page-break-before|page-break-inside|'
+         r'pause-after|pause-before|pause|pitch|pitch-range|'
+         r'play-during|position|quotes|richness|right|size|'
+         r'speak-header|speak-numeral|speak-punctuation|speak|'
+         r'speech-rate|stress|table-layout|text-align|text-decoration|'
+         r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
+         r'vertical-align|visibility|voice-family|volume|white-space|'
+         r'widows|width|word-spacing|z-index|bottom|left|'
+         r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
+         r'behind|below|bidi-override|blink|block|bold|bolder|both|'
+         r'capitalize|center-left|center-right|center|circle|'
+         r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
+         r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
+         r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
+         r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
+         r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
+         r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
+         r'inherit|inline-table|inline|inset|inside|invert|italic|'
+         r'justify|katakana-iroha|katakana|landscape|larger|large|'
+         r'left-side|leftwards|level|lighter|line-through|list-item|'
+         r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
+         r'lower|low|medium|message-box|middle|mix|monospace|'
+         r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
+         r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
+         r'open-quote|outset|outside|overline|pointer|portrait|px|'
+         r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
+         r'rightwards|s-resize|sans-serif|scroll|se-resize|'
+         r'semi-condensed|semi-expanded|separate|serif|show|silent|'
+         r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
+         r'spell-out|square|static|status-bar|super|sw-resize|'
+         r'table-caption|table-cell|table-column|table-column-group|'
+         r'table-footer-group|table-header-group|table-row|'
+         r'table-row-group|text|text-bottom|text-top|thick|thin|'
+         r'transparent|ultra-condensed|ultra-expanded|underline|'
+         r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
+         r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
+         r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
+        (r'(indigo|gold|firebrick|indianred|darkolivegreen|'
+         r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
+         r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
+         r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
+         r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
+         r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
+         r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
+         r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
+         r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
+         r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
+         r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
+         r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
+         r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
+         r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
+         r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
+         r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
+         r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
+         r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
+         r'plum|darkgoldenrod|sandybrown|magenta|tan|'
+         r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
+         r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
+         r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
+         r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
+         r'lightyellow|lavenderblush|linen|mediumaquamarine|'
+         r'blueviolet|peachpuff)\b', Name.Entity),
+        (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
+         r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
+        (r'\!(important|default)', Name.Exception),
+        (r'(true|false)', Name.Pseudo),
+        (r'(and|or|not)', Operator.Word),
+        (r'/\*', Comment.Multiline, 'inline-comment'),
+        (r'//[^\n]*', Comment.Single),
+        (r'\#[a-z0-9]{1,6}', Number.Hex),
+        (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
+        (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
+        (r'#{', String.Interpol, 'interpolation'),
+        (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
+        (r'[\[\]()]+', Punctuation),
+        (r'"', String.Double, 'string-double'),
+        (r"'", String.Single, 'string-single'),
+        (r'[a-z_-][\w-]*', Name),
+    ],
+
+    'interpolation': [
+        (r'\}', String.Interpol, '#pop'),
+        include('value'),
+    ],
+
+    'selector': [
+        (r'[ \t]+', Text),
+        (r'\:', Name.Decorator, 'pseudo-class'),
+        (r'\.', Name.Class, 'class'),
+        (r'\#', Name.Namespace, 'id'),
+        (r'[a-zA-Z0-9_-]+', Name.Tag),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'&', Keyword),
+        (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
+        (r'"', String.Double, 'string-double'),
+        (r"'", String.Single, 'string-single'),
+    ],
+
+    'string-double': [
+        (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'"', String.Double, '#pop'),
+    ],
+
+    'string-single': [
+        (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r"'", String.Double, '#pop'),
+    ],
+
+    'string-url': [
+        (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'\)', String.Other, '#pop'),
+    ],
+
+    'pseudo-class': [
+        (r'[\w-]+', Name.Decorator),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'', Text, '#pop'),
+    ],
+
+    'class': [
+        (r'[\w-]+', Name.Class),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'', Text, '#pop'),
+    ],
+
+    'id': [
+        (r'[\w-]+', Name.Namespace),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'', Text, '#pop'),
+    ],
+
+    'for': [
+        (r'(from|to|through)', Operator.Word),
+        include('value'),
+    ],
+}
+
 class SassLexer(ExtendedRegexLexer):
     """
     For Sass stylesheets.
@@ -1333,14 +1510,17 @@
              'root'),
             (r'@import', Keyword, 'import'),
             (r'@for', Keyword, 'for'),
-            (r'@(debug|if|while)', Keyword, 'script'),
+            (r'@(debug|warn|if|while)', Keyword, 'value'),
+            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+            (r'@extend', Keyword, 'selector'),
             (r'@[a-z0-9_-]+', Keyword, 'selector'),
-            (r'=[\w-]+', Name.Function, 'script'),
-            (r'\+[\w-]+', Name.Decorator, 'script'),
-            (r'(![a-z_]\w*)([ \t]*(?:\|\|)?=)',
-             bygroups(Name.Variable, Operator), 'script'),
+            (r'=[\w-]+', Name.Function, 'value'),
+            (r'\+[\w-]+', Name.Decorator, 'value'),
+            (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
+             bygroups(Name.Variable, Operator), 'value'),
             (r':', Name.Attribute, 'old-style-attr'),
-            (r'(?=[^\s:"\[]+\s*[=:]([ \t]|$))', Name.Attribute, 'new-style-attr'),
+            (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
             (r'', Text, 'selector'),
         ],
 
@@ -1360,210 +1540,83 @@
             (r'\n', Text, 'root'),
         ],
 
-        'for': [
-            (r'(from|to|through)', Operator.Word),
-            include('script'),
-        ],
-
         'old-style-attr': [
             (r'[^\s:="\[]+', Name.Attribute),
             (r'#{', String.Interpol, 'interpolation'),
-            (r'[ \t]*=', Operator, 'script'),
+            (r'[ \t]*=', Operator, 'value'),
             (r'', Text, 'value'),
         ],
 
         'new-style-attr': [
             (r'[^\s:="\[]+', Name.Attribute),
             (r'#{', String.Interpol, 'interpolation'),
-            (r'[ \t]*=', Operator, 'script'),
-            (r':', Name.Attribute, 'value'),
-        ],
-
-        'value': [
-            (r'[ \t]+', Text),
-            (r'url\(', String.Other, 'string-url'),
-            (r'(azimuth|background-attachment|background-color|'
-             r'background-image|background-position|background-repeat|'
-             r'background|border-bottom-color|border-bottom-style|'
-             r'border-bottom-width|border-left-color|border-left-style|'
-             r'border-left-width|border-right|border-right-color|'
-             r'border-right-style|border-right-width|border-top-color|'
-             r'border-top-style|border-top-width|border-bottom|'
-             r'border-collapse|border-left|border-width|border-color|'
-             r'border-spacing|border-style|border-top|border|caption-side|'
-             r'clear|clip|color|content|counter-increment|counter-reset|'
-             r'cue-after|cue-before|cue|cursor|direction|display|'
-             r'elevation|empty-cells|float|font-family|font-size|'
-             r'font-size-adjust|font-stretch|font-style|font-variant|'
-             r'font-weight|font|height|letter-spacing|line-height|'
-             r'list-style-type|list-style-image|list-style-position|'
-             r'list-style|margin-bottom|margin-left|margin-right|'
-             r'margin-top|margin|marker-offset|marks|max-height|max-width|'
-             r'min-height|min-width|opacity|orphans|outline|outline-color|'
-             r'outline-style|outline-width|overflow|padding-bottom|'
-             r'padding-left|padding-right|padding-top|padding|page|'
-             r'page-break-after|page-break-before|page-break-inside|'
-             r'pause-after|pause-before|pause|pitch|pitch-range|'
-             r'play-during|position|quotes|richness|right|size|'
-             r'speak-header|speak-numeral|speak-punctuation|speak|'
-             r'speech-rate|stress|table-layout|text-align|text-decoration|'
-             r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
-             r'vertical-align|visibility|voice-family|volume|white-space|'
-             r'widows|width|word-spacing|z-index|bottom|left|'
-             r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
-             r'behind|below|bidi-override|blink|block|bold|bolder|both|'
-             r'capitalize|center-left|center-right|center|circle|'
-             r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
-             r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
-             r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
-             r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
-             r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
-             r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
-             r'inherit|inline-table|inline|inset|inside|invert|italic|'
-             r'justify|katakana-iroha|katakana|landscape|larger|large|'
-             r'left-side|leftwards|level|lighter|line-through|list-item|'
-             r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
-             r'lower|low|medium|message-box|middle|mix|monospace|'
-             r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
-             r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
-             r'open-quote|outset|outside|overline|pointer|portrait|px|'
-             r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
-             r'rightwards|s-resize|sans-serif|scroll|se-resize|'
-             r'semi-condensed|semi-expanded|separate|serif|show|silent|'
-             r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
-             r'spell-out|square|static|status-bar|super|sw-resize|'
-             r'table-caption|table-cell|table-column|table-column-group|'
-             r'table-footer-group|table-header-group|table-row|'
-             r'table-row-group|text|text-bottom|text-top|thick|thin|'
-             r'transparent|ultra-condensed|ultra-expanded|underline|'
-             r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
-             r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
-             r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
-            (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
-             r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
-             r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
-             r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
-             r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
-             r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
-             r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
-             r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
-             r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
-             r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
-             r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
-             r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
-             r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
-             r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
-             r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
-             r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
-             r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
-             r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
-             r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
-             r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
-             r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
-             r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
-             r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
-             r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
-             r'blueviolet|peachpuff)\b', Name.Entity),
-            (r'\!important', Name.Exception),
-            (r'/\*', Comment, 'inline-comment'),
-            (r'\#[a-z0-9]{1,6}', Number.Hex),
-            (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
-            (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
-            (r'#{', String.Interpol, 'interpolation'),
-            (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
-            (r'[\[\]();]+', Punctuation),
-            (r'"', String.Double, 'string-double'),
-            (r"'", String.Single, 'string-single'),
-            (r'[a-z][\w-]*', Name),
-            (r'\n', Text, 'root'),
-        ],
-
-        'script': [
-            (r'[ \t]+', Text),
-            (r'![\w_]+', Name.Variable),
-            (r'[+\-*/%=(),!><]', Operator),
-            (r'"', String.Double, 'string-double'),
-            (r"'", String.Single, 'string-single'),
-            (r'\#[a-z0-9]{1,6}', Number.Hex),
-            (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
-            (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
-            (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
-             r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
-            (r'(true|false)', Name.Pseudo),
-            (r'(and|or|not)', Operator.Word),
-            (r'(\\.|[^\s\\+*\/%(),=!])+(?=[ \t]*\()', Name.Function),
-            (r'(\\.|[^\s\\+*\/%(),=!])+', Name),
-            (r'\n', Text, 'root'),
-        ],
-
-        'interpolation': [
-            (r'\}', String.Interpol, '#pop'),
-            include('script'),
-        ],
-
-        'selector': [
-            (r'[ \t]+', Text),
-            (r'\:', Name.Decorator, 'pseudo-class'),
-            (r'\.', Name.Class, 'class'),
-            (r'\#', Name.Namespace, 'id'),
-            (r'[a-zA-Z0-9_-]+', Name.Tag),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'&', Keyword),
-            (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
-            (r'"', String.Double, 'string-double'),
-            (r"'", String.Single, 'string-single'),
-            (r'\n', Text, 'root'),
-        ],
-
-        'string-double': [
-            (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'"', String.Double, '#pop'),
-        ],
-
-        'string-single': [
-            (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r"'", String.Double, '#pop'),
-        ],
-
-        'string-url': [
-            (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'\)', String.Other, '#pop'),
+            (r'[ \t]*[=:]', Operator, 'value'),
         ],
 
         'inline-comment': [
-            (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment),
+            (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
             (r'#\{', String.Interpol, 'interpolation'),
             (r"\*/", Comment, '#pop'),
         ],
+    }
+    for group, common in common_sass_tokens.iteritems():
+        tokens[group] = copy.copy(common)
+    tokens['value'].append((r'\n', Text, 'root'))
+    tokens['selector'].append((r'\n', Text, 'root'))
 
-        'pseudo-class': [
-            (r'[\w-]+', Name.Decorator),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'', Text, '#pop'),
+
+class ScssLexer(RegexLexer):
+    """
+    For SCSS stylesheets.
+    """
+
+    name = 'SCSS'
+    aliases = ['scss']
+    filenames = ['*.scss']
+    mimetypes = ['text/x-scss']
+
+    flags = re.IGNORECASE | re.DOTALL
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'@import', Keyword, 'value'),
+            (r'@for', Keyword, 'for'),
+            (r'@(debug|warn|if|while)', Keyword, 'value'),
+            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+            (r'@extend', Keyword, 'selector'),
+            (r'@[a-z0-9_-]+', Keyword, 'selector'),
+            (r'(\$[\w-]\w*)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
+            (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
+            (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
+            (r'', Text, 'selector'),
         ],
 
-        'class': [
-            (r'[\w-]+', Name.Class),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'', Text, '#pop'),
+        'attr': [
+            (r'[^\s:="\[]+', Name.Attribute),
+            (r'#{', String.Interpol, 'interpolation'),
+            (r'[ \t]*:', Operator, 'value'),
         ],
 
-        'id': [
-            (r'[\w-]+', Name.Namespace),
+        'inline-comment': [
+            (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
             (r'#\{', String.Interpol, 'interpolation'),
-            (r'', Text, '#pop'),
+            (r"\*/", Comment, '#pop'),
         ],
     }
+    for group, common in common_sass_tokens.iteritems():
+        tokens[group] = copy.copy(common)
+    tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
+    tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
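
As a quick usage sketch (not part of this changeset): the new lexer plugs into the usual Pygments pipeline. The import path and the sample stylesheet below are illustrative assumptions.

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.web import ScssLexer   # assumed location of this module

    scss = '$base: #336699;\n.box { color: $base; &:hover { border: 1px solid $base; } }\n'
    # render the snippet to HTML with the default style
    print highlight(scss, ScssLexer(), HtmlFormatter())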
 
 
 class CoffeeScriptLexer(RegexLexer):
     """
     For `CoffeeScript`_ source code.
 
-    .. _CoffeeScript: http://jashkenas.github.com/coffee-script/
+    .. _CoffeeScript: http://coffeescript.org
 
     *New in Pygments 1.3.*
     """
@@ -1592,24 +1645,27 @@
         'root': [
             (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
             include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
+            (r'\+\+|--|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|=|'
              r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*`%&\|\^/])=?',
              Operator, 'slashstartsregex'),
+            (r'\([^()]*\)\s*->', Name.Function),
             (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
             (r'[})\].]', Punctuation),
             (r'(for|in|of|while|break|return|continue|switch|when|then|if|else|'
              r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
-             r'extends|this)\b', Keyword, 'slashstartsregex'),
+             r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
             (r'(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\b',
              Keyword.Constant),
             (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
              r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
              r'decodeURIComponent|encodeURI|encodeURIComponent|'
-             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
-             r'window)\b', Name.Builtin),
-            (r'[$a-zA-Z_][a-zA-Z0-9_\.:]*:\s', Name.Variable,
-             'slashstartsregex'),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
+             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+             Name.Builtin),
+            (r'[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable,
+              'slashstartsregex'),
+            (r'@[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable.Instance,
+              'slashstartsregex'),
+            (r'@?[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other, 'slashstartsregex'),
             (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
             (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'[0-9]+', Number.Integer),
@@ -1617,3 +1673,900 @@
             (r"'(\\\\|\\'|[^'])*'", String.Single),
         ]
     }
+
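A small token-level sketch of the rules added above (arrow-function literals as Name.Function, @-prefixed assignments as Name.Variable.Instance); the sample source and import path are assumptions, not part of the changeset.

    from pygments.lexers.web import CoffeeScriptLexer   # assumed location

    src = 'square = (x) -> x * x\n@count = 0\n'
    for tok, text in CoffeeScriptLexer().get_tokens(src):
        print tok, repr(text)
    # '(x) ->' should now be reported as Token.Name.Function,
    # '@count = ' as Token.Name.Variable.Instance
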
+class DuelLexer(RegexLexer):
+    """
+    Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
+    See http://duelengine.org/.
+    See http://jsonml.org/jbst/.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Duel'
+    aliases = ['duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST']
+    filenames = ['*.duel','*.jbst']
+    mimetypes = ['text/x-duel','text/x-jbst']
+
+    flags = re.DOTALL
+
+    tokens = {
+        'root': [
+            (r'(<%[@=#!:]?)(.*?)(%>)',
+             bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
+            (r'(<%\$)(.*?)(:)(.*?)(%>)',
+             bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
+            (r'(<%--)(.*?)(--%>)',
+             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+            (r'(<script.*?>)(.*?)(</script>)',
+             bygroups(using(HtmlLexer),
+                      using(JavascriptLexer), using(HtmlLexer))),
+            (r'(.+?)(?=<)', using(HtmlLexer)),
+            (r'.+', using(HtmlLexer)),
+        ],
+    }
+
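Since the grammar above delegates embedded blocks to other lexers via using(), here is a hedged sketch of tokenizing a tiny template; the sample markup and import path are illustrative guesses.

    from pygments.lexers.web import DuelLexer   # assumed location

    tmpl = '<div class="msg"><%= data.message %></div>\n'
    for tok, text in DuelLexer().get_tokens(tmpl):
        print tok, repr(text)
    # the surrounding markup goes through HtmlLexer, the <%= ... %> body
    # through JavascriptLexer, with <%= and %> reported as Name.Tag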
+
+class ScamlLexer(ExtendedRegexLexer):
+    """
+    For `Scaml markup <http://scalate.fusesource.org/>`_.  Scaml is Haml for Scala.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Scaml'
+    aliases = ['scaml', 'SCAML']
+    filenames = ['*.scaml']
+    mimetypes = ['text/x-scaml']
+
+    flags = re.IGNORECASE
+    # Scaml does not yet support the " |\n" notation to
+    # wrap long lines.  Once it does, use the custom faux
+    # dot instead.
+    # _dot = r'(?: \|\n(?=.* \|)|.)'
+    _dot = r'.'
+
+    tokens = {
+        'root': [
+            (r'[ \t]*\n', Text),
+            (r'[ \t]*', _indentation),
+        ],
+
+        'css': [
+            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
+            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
+        ],
+
+        'eval-or-plain': [
+            (r'[&!]?==', Punctuation, 'plain'),
+            (r'([&!]?[=~])(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),
+             'root'),
+            (r'', Text, 'plain'),
+        ],
+
+        'content': [
+            include('css'),
+            (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
+            (r'!!!' + _dot + '*\n', Name.Namespace, '#pop'),
+            (r'(/)(\[' + _dot + '*?\])(' + _dot + '*\n)',
+             bygroups(Comment, Comment.Special, Comment),
+             '#pop'),
+            (r'/' + _dot + '*\n', _starts_block(Comment, 'html-comment-block'),
+             '#pop'),
+            (r'-#' + _dot + '*\n', _starts_block(Comment.Preproc,
+                                                 'scaml-comment-block'), '#pop'),
+            (r'(-@\s*)(import)?(' + _dot + '*\n)',
+             bygroups(Punctuation, Keyword, using(ScalaLexer)),
+             '#pop'),
+            (r'(-)(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),
+             '#pop'),
+            (r':' + _dot + '*\n', _starts_block(Name.Decorator, 'filter-block'),
+             '#pop'),
+            include('eval-or-plain'),
+        ],
+
+        'tag': [
+            include('css'),
+            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+            (r'\(', Text, 'html-attributes'),
+            (r'/[ \t]*\n', Punctuation, '#pop:2'),
+            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+            include('eval-or-plain'),
+        ],
+
+        'plain': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+
+        'html-attributes': [
+            (r'\s+', Text),
+            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+            (r'[a-z0-9_:-]+', Name.Attribute),
+            (r'\)', Text, '#pop'),
+        ],
+
+        'html-attribute-value': [
+            (r'[ \t]+', Text),
+            (r'[a-z0-9_]+', Name.Variable, '#pop'),
+            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
+            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
+            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+        ],
+
+        'html-comment-block': [
+            (_dot + '+', Comment),
+            (r'\n', Text, 'root'),
+        ],
+
+        'scaml-comment-block': [
+            (_dot + '+', Comment.Preproc),
+            (r'\n', Text, 'root'),
+        ],
+
+        'filter-block': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+    }
+
+
+class JadeLexer(ExtendedRegexLexer):
+    """
+    For Jade markup.
+    Jade is a variant of Scaml, see:
+    http://scalate.fusesource.org/documentation/scaml-reference.html
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Jade'
+    aliases = ['jade', 'JADE']
+    filenames = ['*.jade']
+    mimetypes = ['text/x-jade']
+
+    flags = re.IGNORECASE
+    _dot = r'.'
+
+    tokens = {
+        'root': [
+            (r'[ \t]*\n', Text),
+            (r'[ \t]*', _indentation),
+        ],
+
+        'css': [
+            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
+            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
+        ],
+
+        'eval-or-plain': [
+            (r'[&!]?==', Punctuation, 'plain'),
+            (r'([&!]?[=~])(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),  'root'),
+            (r'', Text, 'plain'),
+        ],
+
+        'content': [
+            include('css'),
+            (r'!!!' + _dot + '*\n', Name.Namespace, '#pop'),
+            (r'(/)(\[' + _dot + '*?\])(' + _dot + '*\n)',
+             bygroups(Comment, Comment.Special, Comment),
+             '#pop'),
+            (r'/' + _dot + '*\n', _starts_block(Comment, 'html-comment-block'),
+             '#pop'),
+            (r'-#' + _dot + '*\n', _starts_block(Comment.Preproc,
+                                                 'scaml-comment-block'), '#pop'),
+            (r'(-@\s*)(import)?(' + _dot + '*\n)',
+             bygroups(Punctuation, Keyword, using(ScalaLexer)),
+             '#pop'),
+            (r'(-)(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),
+             '#pop'),
+            (r':' + _dot + '*\n', _starts_block(Name.Decorator, 'filter-block'),
+             '#pop'),
+            (r'[a-z0-9_:-]+', Name.Tag, 'tag'),
+            (r'|', Text, 'eval-or-plain'),
+        ],
+
+        'tag': [
+            include('css'),
+            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+            (r'\(', Text, 'html-attributes'),
+            (r'/[ \t]*\n', Punctuation, '#pop:2'),
+            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+            include('eval-or-plain'),
+        ],
+
+        'plain': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+
+        'html-attributes': [
+            (r'\s+', Text),
+            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+            (r'[a-z0-9_:-]+', Name.Attribute),
+            (r'\)', Text, '#pop'),
+        ],
+
+        'html-attribute-value': [
+            (r'[ \t]+', Text),
+            (r'[a-z0-9_]+', Name.Variable, '#pop'),
+            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
+            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
+            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+        ],
+
+        'html-comment-block': [
+            (_dot + '+', Comment),
+            (r'\n', Text, 'root'),
+        ],
+
+        'scaml-comment-block': [
+            (_dot + '+', Comment.Preproc),
+            (r'\n', Text, 'root'),
+        ],
+
+        'filter-block': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+    }
+
+
+class XQueryLexer(ExtendedRegexLexer):
+    """
+    An XQuery lexer, parsing a stream and outputting the tokens needed to
+    highlight XQuery code.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'XQuery'
+    aliases = ['xquery', 'xqy']
+    filenames = ['*.xqy', '*.xquery']
+    mimetypes = ['text/xquery', 'application/xquery']
+
+    xquery_parse_state = []
+
+    # FIX UNICODE LATER
+    #ncnamestartchar = (
+    #    ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
+    #    ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
+    #    ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
+    #    ur"[\u10000-\uEFFFF]"
+    #)
+    ncnamestartchar = r"[A-Z]|_|[a-z]"
+    # FIX UNICODE LATER
+    #ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
+    #                                ur"[\u203F-\u2040]")
+    ncnamechar = ncnamestartchar + r"|-|\.|[0-9]"
+    ncname = "((%s)+(%s)*)" % (ncnamestartchar, ncnamechar)
+    pitarget_namestartchar = r"[A-KN-WY-Z]|_|:|[a-kn-wy-z]"
+    pitarget_namechar = pitarget_namestartchar + r"|-|\.|[0-9]"
+    pitarget = "(%s)+(%s)*" % (pitarget_namestartchar, pitarget_namechar)
+    prefixedname = "%s:%s" % (ncname, ncname)
+    unprefixedname = ncname
+    qname = "((%s)|(%s))" %(prefixedname, unprefixedname)
+
+    entityref = r'&(lt|gt|amp|quot|apos|nbsp);'
+    charref = r'&#[0-9]+;|&#x[0-9a-fA-F]+;'
+
+    stringdouble = r'("((' + entityref + r')|(' + charref + r')|("")|([^&"]))*")'
+    stringsingle = r"('((" + entityref + r")|(" + charref + r")|('')|([^&']))*')"
+
+    # FIX UNICODE LATER
+    #elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+    #                      ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+    elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
+    #quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
+    #                       ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+    quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
+    #aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+    #                       ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+    aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]'
+
+
+    # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
+    #                 aposattrcontentchar
+    #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+
+    flags = re.DOTALL | re.MULTILINE | re.UNICODE
+
+    def operator_root_callback(lexer, match, ctx):
+        yield match.start(), Operator, match.group(1)
+        # transition to root always - don't pop off stack
+        ctx.stack = ['root']
+        ctx.pos = match.end()
+
+    def popstate_tag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        ctx.stack.append(lexer.xquery_parse_state.pop())
+        ctx.pos = match.end()
+
+    def popstate_xmlcomment_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append(lexer.xquery_parse_state.pop())
+        ctx.pos = match.end()
+
+    def popstate_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        next_state = lexer.xquery_parse_state.pop()
+        if next_state == 'occurrenceindicator':
+            if re.match("[?*+]+", match.group(2)):
+                yield match.start(), Punctuation, match.group(2)
+                ctx.stack.append('operator')
+                ctx.pos = match.end()
+            else:
+                ctx.stack.append('operator')
+                ctx.pos = match.end(1)
+        else:
+            ctx.stack.append(next_state)
+            ctx.pos = match.end(1)
+
+    def popstate_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        # if we have run out of our state stack, pop whatever is on the pygments
+        # state stack
+        if len(lexer.xquery_parse_state) == 0:
+            ctx.stack.pop()
+        elif len(ctx.stack) > 1:
+            ctx.stack.append(lexer.xquery_parse_state.pop())
+        else:
+            # possibly unnecessary, but fall back to the root state just in case
+            ctx.stack = ['root']
+        ctx.pos = match.end()
+
+    def pushstate_element_content_starttag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        lexer.xquery_parse_state.append('element_content')
+        ctx.stack.append('start_tag')
+        ctx.pos = match.end()
+
+    def pushstate_cdata_section_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('cdata_section')
+        lexer.xquery_parse_state.append(ctx.state.pop)
+        ctx.pos = match.end()
+
+    def pushstate_starttag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        lexer.xquery_parse_state.append(ctx.state.pop)
+        ctx.stack.append('start_tag')
+        ctx.pos = match.end()
+
+    def pushstate_operator_order_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        ctx.stack = ['root']
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_validate(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        ctx.stack = ['root']
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_validate_withmode(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Keyword, match.group(3)
+        ctx.stack = ['root']
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('processing_instruction')
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('processing_instruction')
+        lexer.xquery_parse_state.append('element_content')
+        ctx.pos = match.end()
+
+    def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('cdata_section')
+        lexer.xquery_parse_state.append('element_content')
+        ctx.pos = match.end()
+
+    def pushstate_operator_cdata_section_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('cdata_section')
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('xml_comment')
+        lexer.xquery_parse_state.append('element_content')
+        ctx.pos = match.end()
+
+    def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('xml_comment')
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('kindtest')
+        ctx.stack.append('kindtest')
+        ctx.pos = match.end()
+
+    def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack.append('kindtestforpi')
+        ctx.pos = match.end()
+
+    def pushstate_operator_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack.append('kindtest')
+        ctx.pos = match.end()
+
+    def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('occurrenceindicator')
+        ctx.stack.append('kindtest')
+        ctx.pos = match.end()
+
+    def pushstate_operator_starttag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack.append('start_tag')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack = ['root']#.append('root')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_construct_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack = ['root']
+        ctx.pos = match.end()
+
+    def pushstate_root_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        cur_state = ctx.stack.pop()
+        lexer.xquery_parse_state.append(cur_state)
+        ctx.stack = ['root']#.append('root')
+        ctx.pos = match.end()
+
+    def pushstate_operator_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    tokens = {
+        'comment': [
+            # xquery comments
+            (r'(:\))', Comment, '#pop'),
+            (r'(\(:)', Comment, '#push'),
+            (r'[^:)]', Comment),
+            (r'([^:)]|:|\))', Comment),
+        ],
+        'whitespace': [
+            (r'\s+', Text),
+        ],
+        'operator': [
+            include('whitespace'),
+            (r'(\})', popstate_callback),
+            (r'\(:', Comment, 'comment'),
+
+            (r'(\{)', pushstate_root_callback),
+            (r'then|else|external|at|div|except', Keyword, 'root'),
+            (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
+            (r'and|or', Operator.Word, 'root'),
+            (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
+             Operator.Word, 'root'),
+            (r'return|satisfies|to|union|where|preserve\s+strip',
+             Keyword, 'root'),
+            (r'(::|;|>=|>>|>|\[|<=|<<|<|-|\*|!=|\+|//|/|\||:=|,|=)',
+             operator_root_callback),
+            (r'(castable|cast)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword), 'singletype'),
+            (r'(instance)(\s+)(of)|(treat)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword), 'itemtype'),
+            (r'(case)|(as)', Keyword, 'itemtype'),
+            (r'(\))(\s*)(as)',
+             bygroups(Punctuation, Text, Keyword), 'itemtype'),
+            (r'\$', Name.Variable, 'varname'),
+            (r'(for|let)(\s+)(\$)',
+             bygroups(Keyword, Text, Name.Variable), 'varname'),
+            #(r'\)|\?|\]', Punctuation, '#push'),
+            (r'\)|\?|\]', Punctuation),
+            (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
+            (r'ascending|descending|default', Keyword, '#push'),
+            (r'external', Keyword),
+            (r'collation', Keyword, 'uritooperator'),
+            # finally catch all string literals and stay in operator state
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+
+            (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
+        ],
+        'uritooperator': [
+            (stringdouble, String.Double, '#pop'),
+            (stringsingle, String.Single, '#pop'),
+        ],
+        'namespacedecl': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (r'(at)(\s+)'+stringdouble, bygroups(Keyword, Text, String.Double)),
+            (r"(at)(\s+)"+stringsingle, bygroups(Keyword, Text, String.Single)),
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+            (r',', Punctuation),
+            (r'=', Operator),
+            (r';', Punctuation, 'root'),
+            (ncname, Name.Namespace),
+        ],
+        'namespacekeyword': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (stringdouble, String.Double, 'namespacedecl'),
+            (stringsingle, String.Single, 'namespacedecl'),
+            (r'inherit|no-inherit', Keyword, 'root'),
+            (r'namespace', Keyword, 'namespacedecl'),
+            (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
+            (r'preserve|no-preserve', Keyword),
+            (r',', Punctuation),
+        ],
+        'varname': [
+            (r'\(:', Comment, 'comment'),
+            (qname, Name.Variable, 'operator'),
+        ],
+        'singletype': [
+            (r'\(:', Comment, 'comment'),
+            (ncname + r'(:\*)', Name.Variable, 'operator'),
+            (qname, Name.Variable, 'operator'),
+        ],
+        'itemtype': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (r'\$', Punctuation, 'varname'),
+            (r'void\s*\(\s*\)',
+             bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
+            (r'(element|attribute|schema-element|schema-attribute|comment|text|'
+             r'node|binary|document-node)(\s*)(\()',
+             pushstate_occurrenceindicator_kindtest_callback),
+            # MarkLogic-specific type?
+            (r'(processing-instruction)(\s*)(\()',
+             bygroups(Keyword, Text, Punctuation),
+             ('occurrenceindicator', 'kindtestforpi')),
+            (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
+             bygroups(Keyword, Text, Punctuation, Text, Punctuation),
+             'occurrenceindicator'),
+            (r'\(\#', Punctuation, 'pragma'),
+            (r';', Punctuation, '#pop'),
+            (r'then|else', Keyword, '#pop'),
+            (r'(at)(\s+)' + stringdouble,
+             bygroups(Keyword, Text, String.Double), 'namespacedecl'),
+            (r'(at)(\s+)' + stringsingle,
+             bygroups(Keyword, Text, String.Single), 'namespacedecl'),
+            (r'except|intersect|in|is|return|satisfies|to|union|where',
+             Keyword, 'root'),
+            (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
+            (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'),
+            (r'external|at', Keyword, 'root'),
+            (r'(stable)(\s+)(order)(\s+)(by)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
+            (r'(castable|cast)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword), 'singletype'),
+            (r'(instance)(\s+)(of)|(treat)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword)),
+            (r'case|as', Keyword, 'itemtype'),
+            (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+            (ncname + r'(:\*)', Keyword.Type, 'operator'),
+            (qname, Keyword.Type, 'occurrenceindicator'),
+        ],
+        'kindtest': [
+            (r'\(:', Comment, 'comment'),
+            (r'({)', Punctuation, 'root'),
+            (r'(\))([*+?]?)', popstate_kindtest_callback),
+            (r'\*', Name, 'closekindtest'),
+            (qname, Name, 'closekindtest'),
+            (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
+        ],
+        'kindtestforpi': [
+            (r'\(:', Comment, 'comment'),
+            (r'\)', Punctuation, '#pop'),
+            (ncname, bygroups(Name.Variable, Name.Variable)),
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+        ],
+        'closekindtest': [
+            (r'\(:', Comment, 'comment'),
+            (r'(\))', popstate_callback),
+            (r',', Punctuation),
+            (r'(\{)', pushstate_operator_root_callback),
+            (r'\?', Punctuation),
+        ],
+        'xml_comment': [
+            (r'(-->)', popstate_xmlcomment_callback),
+            (r'[^-]{1,2}', Literal),
+            (r'\u009|\u00A|\u00D|[\u0020-\u00D7FF]|[\u00E000-\u00FFFD]|'
+             r'[\u0010000-\u0010FFFF]', Literal),
+        ],
+        'processing_instruction': [
+            (r'\s+', Text, 'processing_instruction_content'),
+            (r'\?>', String.Doc, '#pop'),
+            (pitarget, Name),
+        ],
+        'processing_instruction_content': [
+            (r'\?>', String.Doc, '#pop'),
+            (r'\u009|\u00A|\u00D|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
+             r'[\u10000-\u10FFFF]', Literal),
+        ],
+        'cdata_section': [
+            (r']]>', String.Doc, '#pop'),
+            (r'\u009|\u00A|\u00D|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
+             r'[\u10000-\u10FFFF]', Literal),
+        ],
+        'start_tag': [
+            include('whitespace'),
+            (r'(/>)', popstate_tag_callback),
+            (r'>', Name.Tag, 'element_content'),
+            (r'"', Punctuation, 'quot_attribute_content'),
+            (r"'", Punctuation, 'apos_attribute_content'),
+            (r'=', Operator),
+            (qname, Name.Tag),
+        ],
+        'quot_attribute_content': [
+            (r'"', Punctuation, 'start_tag'),
+            (r'(\{)', pushstate_root_callback),
+            (r'""', Name.Attribute),
+            (quotattrcontentchar, Name.Attribute),
+            (entityref, Name.Attribute),
+            (charref, Name.Attribute),
+            (r'\{\{|\}\}', Name.Attribute),
+        ],
+        'apos_attribute_content': [
+            (r"'", Punctuation, 'start_tag'),
+            (r'\{', Punctuation, 'root'),
+            (r"''", Name.Attribute),
+            (aposattrcontentchar, Name.Attribute),
+            (entityref, Name.Attribute),
+            (charref, Name.Attribute),
+            (r'\{\{|\}\}', Name.Attribute),
+        ],
+        'element_content': [
+            (r'</', Name.Tag, 'end_tag'),
+            (r'(\{)', pushstate_root_callback),
+            (r'(<!--)', pushstate_element_content_xmlcomment_callback),
+            (r'(<\?)', pushstate_element_content_processing_instruction_callback),
+            (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
+            (r'(<)', pushstate_element_content_starttag_callback),
+            (elementcontentchar, Literal),
+            (entityref, Literal),
+            (charref, Literal),
+            (r'\{\{|\}\}', Literal),
+        ],
+        'end_tag': [
+            include('whitespace'),
+            (r'(>)', popstate_tag_callback),
+            (qname, Name.Tag),
+        ],
+        'xmlspace_decl': [
+            (r'\(:', Comment, 'comment'),
+            (r'preserve|strip', Keyword, '#pop'),
+        ],
+        'declareordering': [
+            (r'\(:', Comment, 'comment'),
+            include('whitespace'),
+            (r'ordered|unordered', Keyword, '#pop'),
+        ],
+        'xqueryversion': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+            (r'encoding', Keyword),
+            (r';', Punctuation, '#pop'),
+        ],
+        'pragma': [
+            (qname, Name.Variable, 'pragmacontents'),
+        ],
+        'pragmacontents': [
+            (r'#\)', Punctuation, 'operator'),
+            (r'\u009|\u00A|\u00D|[\u0020-\u00D7FF]|[\u00E000-\u00FFFD]|'
+             r'[\u0010000-\u0010FFFF]', Literal),
+            (r'(\s*)', Text),
+        ],
+        'occurrenceindicator': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (r'\*|\?|\+', Operator, 'operator'),
+            (r':=', Operator, 'root'),
+            (r'', Text, 'operator'),
+        ],
+        'option': [
+            include('whitespace'),
+            (qname, Name.Variable, '#pop'),
+        ],
+        'qname_braren': [
+            include('whitespace'),
+            (r'(\{)', pushstate_operator_root_callback),
+            (r'(\()', Punctuation, 'root'),
+        ],
+        'element_qname': [
+            (qname, Name.Variable, 'root'),
+        ],
+        'attribute_qname': [
+            (qname, Name.Variable, 'root'),
+        ],
+        'root': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+
+            # handle operator state
+            # order on numbers matters - handle most complex first
+            (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Double, 'operator'),
+            (r'(\.\d+)[eE][\+\-]?\d+', Number.Double, 'operator'),
+            (r'(\.\d+|\d+\.\d*)', Number, 'operator'),
+            (r'(\d+)', Number.Integer, 'operator'),
+            (r'(\.\.|\.|\)|\*)', Punctuation, 'operator'),
+            (r'(declare)(\s+)(construction)',
+             bygroups(Keyword, Text, Keyword), 'operator'),
+            (r'(declare)(\s+)(default)(\s+)(order)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
+            (ncname + ':\*', Name, 'operator'),
+            (stringdouble, String.Double, 'operator'),
+            (stringsingle, String.Single, 'operator'),
+
+            (r'(\})', popstate_callback),
+
+            #NAMESPACE DECL
+            (r'(declare)(\s+)(default)(\s+)(collation)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword)),
+            (r'(module|declare)(\s+)(namespace)',
+             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+            (r'(declare)(\s+)(base-uri)',
+             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+
+            #NAMESPACE KEYWORD
+            (r'(declare)(\s+)(default)(\s+)(element|function)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
+            (r'(import)(\s+)(schema|module)',
+             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
+            (r'(declare)(\s+)(copy-namespaces)',
+             bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
+
+            #VARNAMEs
+            (r'(for|let|some|every)(\s+)(\$)',
+             bygroups(Keyword, Text, Name.Variable), 'varname'),
+            (r'\$', Name.Variable, 'varname'),
+            (r'(declare)(\s+)(variable)(\s+)(\$)',
+             bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+
+            #ITEMTYPE
+            (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+
+            (r'(element|attribute|schema-element|schema-attribute|comment|'
+             r'text|node|document-node)(\s+)(\()',
+             pushstate_operator_kindtest_callback),
+
+            (r'(processing-instruction)(\s+)(\()',
+             pushstate_operator_kindtestforpi_callback),
+
+            (r'(<!--)', pushstate_operator_xmlcomment_callback),
+
+            (r'(<\?)', pushstate_operator_processing_instruction_callback),
+
+            (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
+
+            # (r'</', Name.Tag, 'end_tag'),
+            (r'(<)', pushstate_operator_starttag_callback),
+
+            (r'(declare)(\s+)(boundary-space)',
+             bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
+
+            (r'(validate)(\s+)(lax|strict)',
+             pushstate_operator_root_validate_withmode),
+            (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
+            (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+            (r'(element|attribute)(\s*)(\{)',
+             pushstate_operator_root_construct_callback),
+
+            (r'(document|text|processing-instruction|comment)(\s*)(\{)',
+             pushstate_operator_root_construct_callback),
+            #ATTRIBUTE
+            (r'(attribute)(\s+)(?=' + qname + r')',
+             bygroups(Keyword, Text), 'attribute_qname'),
+            #ELEMENT
+            (r'(element)(\s+)(?=' +qname+ r')',
+             bygroups(Keyword, Text), 'element_qname'),
+            #PROCESSING_INSTRUCTION
+            (r'(processing-instruction)(\s+)' + ncname + r'(\s*)(\{)',
+             bygroups(Keyword, Text, Name.Variable, Text, Punctuation), 'operator'),
+
+            (r'(declare|define)(\s+)(function)',
+             bygroups(Keyword, Text, Keyword)),
+
+            (r'(\{)', pushstate_operator_root_callback),
+
+            (r'(unordered|ordered)(\s*)(\{)',
+             pushstate_operator_order_callback),
+
+            (r'(declare)(\s+)(ordering)',
+             bygroups(Keyword, Text, Keyword), 'declareordering'),
+
+            (r'(xquery)(\s+)(version)',
+             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
+
+            (r'(\(#)', Punctuation, 'pragma'),
+
+            # sometimes return can occur in root state
+            (r'return', Keyword),
+
+            (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
+             'option'),
+
+            #URI LITERALS - single and double quoted
+            (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
+            (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
+
+            (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
+             bygroups(Keyword, Punctuation)),
+            (r'(descendant|following-sibling|following|parent|preceding-sibling'
+             r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
+
+            (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+
+            (r'then|else', Keyword),
+
+            # ML specific
+            (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
+            (r'(catch)(\s*)(\()(\$)',
+             bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
+
+            (r'@' + qname, Name.Attribute),
+            (r'@\*', Name.Attribute),
+            (r'@' + ncname, Name.Attribute),
+
+            (r'//|/|\+|-|;|,|\(|\)', Punctuation),
+
+            # STANDALONE QNAMES
+            (qname + r'(?=\s*[{])', Name.Variable, 'qname_braren'),
+            (qname + r'(?=\s*[(])', Name.Function, 'qname_braren'),
+            (qname, Name.Variable, 'operator'),
+        ]
+    }
+
--- a/MoinMoin/support/pygments/styles/__init__.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/styles/__init__.py	Sun Dec 25 16:43:04 2011 +0100
@@ -30,6 +30,7 @@
     'native':   'native::NativeStyle',
     'fruity':   'fruity::FruityStyle',
     'bw':       'bw::BlackWhiteStyle',
+    'vim':      'vim::VimStyle',
     'vs':       'vs::VisualStudioStyle',
     'tango':    'tango::TangoStyle',
 }
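
With the new entry registered, the style is reachable through the normal lookup helpers defined in this same module; a minimal sketch (the sample lexer choice is arbitrary, not part of the changeset):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import PythonLexer
    from pygments.styles import get_style_by_name

    style = get_style_by_name('vim')        # resolves 'vim' -> vim::VimStyle
    html = highlight('print 1', PythonLexer(), HtmlFormatter(style=style))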
--- a/MoinMoin/support/pygments/token.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/token.py	Sun Dec 25 16:43:04 2011 +0100
@@ -40,9 +40,6 @@
         new.parent = self
         return new
 
-    def __hash__(self):
-        return hash(tuple(self))
-
     def __repr__(self):
         return 'Token' + (self and '.' or '') + '.'.join(self)
 
--- a/MoinMoin/support/pygments/util.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/pygments/util.py	Sun Dec 25 16:43:04 2011 +0100
@@ -110,10 +110,16 @@
     returns float values.
     """
     def text_analyse(text):
-        rv = f(text)
+        try:
+            rv = f(text)
+        except Exception:
+            return 0.0
         if not rv:
             return 0.0
-        return min(1.0, max(0.0, float(rv)))
+        try:
+            return min(1.0, max(0.0, float(rv)))
+        except ValueError:
+            return 0.0
     text_analyse.__doc__ = f.__doc__
     return staticmethod(text_analyse)
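
The hardened wrapper now swallows exceptions from the analyse function and non-numeric return values, still clamping numbers into 0.0..1.0; a hedged sketch with a hypothetical analyse function (not from this changeset):

    from pygments.util import make_analysator

    def shaky_analyse(text):
        if not text:
            raise ValueError('no input')
        return 42                          # far outside the 0.0..1.0 range

    class DemoLexer(object):
        analyse_text = make_analysator(shaky_analyse)

    DemoLexer.analyse_text('')             # -> 0.0, the exception is swallowed
    DemoLexer.analyse_text('x')            # -> 1.0, clamped instead of 42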
 
--- a/MoinMoin/support/werkzeug/__init__.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/__init__.py	Sun Dec 25 16:43:04 2011 +0100
@@ -11,14 +11,19 @@
     library.
 
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 from types import ModuleType
 import sys
 
+
+# the version.  Usually set automatically by a script.
+__version__ = '0.8.1'
+
+
 # This import magic raises concerns quite often which is why the implementation
-# and motiviation is explained here in detail now.
+# and motivation is explained here in detail now.
 #
 # The majority of the functions and classes provided by Werkzeug work on the
 # HTTP and WSGI layer.  There is no useful grouping for those which is why
@@ -34,31 +39,32 @@
 # import mapping to objects in other modules
 all_by_module = {
     'werkzeug.debug':       ['DebuggedApplication'],
-    'werkzeug.local':       ['Local', 'LocalManager', 'LocalProxy'],
+    'werkzeug.local':       ['Local', 'LocalManager', 'LocalProxy',
+                             'LocalStack', 'release_local'],
     'werkzeug.templates':   ['Template'],
     'werkzeug.serving':     ['run_simple'],
     'werkzeug.test':        ['Client', 'EnvironBuilder', 'create_environ',
                              'run_wsgi_app'],
     'werkzeug.testapp':     ['test_app'],
     'werkzeug.exceptions':  ['abort', 'Aborter'],
-    'werkzeug.utils':       ['escape', 'url_quote',
-                             'environ_property', 'cookie_date', 'http_date',
-                             'url_encode', 'url_quote_plus', 'url_fix',
-                             'get_host', 'responder',
-                             'SharedDataMiddleware', 'ClosingIterator',
-                             'FileStorage', 'url_unquote_plus', 'url_decode',
-                             'url_unquote', 'get_current_url', 'redirect',
-                             'append_slash_redirect',
+    'werkzeug.urls':        ['url_decode', 'url_encode', 'url_quote',
+                             'url_quote_plus', 'url_unquote',
+                             'url_unquote_plus', 'url_fix', 'Href',
+                             'iri_to_uri', 'uri_to_iri'],
+    'werkzeug.formparser':  ['parse_form_data'],
+    'werkzeug.utils':       ['escape', 'environ_property',
+                             'append_slash_redirect', 'redirect',
                              'cached_property', 'import_string',
                              'dump_cookie', 'parse_cookie', 'unescape',
-                             'format_string', 'Href', 'DispatcherMiddleware',
-                             'find_modules', 'header_property', 'html',
-                             'xhtml', 'HTMLBuilder', 'parse_form_data',
+                             'format_string', 'find_modules', 'header_property',
+                             'html', 'xhtml', 'HTMLBuilder',
                              'validate_arguments', 'ArgumentValidationError',
-                             'bind_arguments', 'FileWrapper', 'wrap_file',
-                             'pop_path_info', 'peek_path_info',
-                             'LimitedStream', 'make_line_iter',
-                             'secure_filename'],
+                             'bind_arguments', 'secure_filename'],
+    'werkzeug.wsgi':        ['get_current_url', 'get_host', 'pop_path_info',
+                             'peek_path_info', 'SharedDataMiddleware',
+                             'DispatcherMiddleware', 'ClosingIterator',
+                             'FileWrapper', 'make_line_iter', 'LimitedStream',
+                             'responder', 'wrap_file', 'extract_path_info'],
     'werkzeug.datastructures': ['MultiDict', 'CombinedMultiDict', 'Headers',
                              'EnvironHeaders', 'ImmutableList',
                              'ImmutableDict', 'ImmutableMultiDict',
@@ -67,9 +73,11 @@
                              'LanguageAccept', 'RequestCacheControl',
                              'ResponseCacheControl', 'ETags', 'HeaderSet',
                              'WWWAuthenticate', 'Authorization',
-                             'CacheControl', 'FileMultiDict', 'CallbackDict'],
+                             'FileMultiDict', 'CallbackDict', 'FileStorage',
+                             'OrderedMultiDict', 'ImmutableOrderedMultiDict'],
     'werkzeug.useragents':  ['UserAgent'],
-    'werkzeug.http':        ['parse_etags', 'parse_date', 'parse_cache_control_header',
+    'werkzeug.http':        ['parse_etags', 'parse_date', 'http_date',
+                             'cookie_date', 'parse_cache_control_header',
                              'is_resource_modified', 'parse_accept_header',
                              'parse_set_header', 'quote_etag', 'unquote_etag',
                              'generate_etag', 'dump_header',
@@ -88,12 +96,13 @@
                              'UserAgentMixin', 'AuthorizationMixin',
                              'WWWAuthenticateMixin',
                              'CommonRequestDescriptorsMixin'],
+    'werkzeug.security':    ['generate_password_hash', 'check_password_hash'],
     # the undocumented easteregg ;-)
     'werkzeug._internal':   ['_easteregg']
 }
 
 # modules that should be imported when accessed as attributes of werkzeug
-attribute_modules = dict.fromkeys(['exceptions', 'routing', 'script'])
+attribute_modules = frozenset(['exceptions', 'routing', 'script'])
 
 
 object_origins = {}
@@ -102,11 +111,6 @@
         object_origins[item] = module
 
 
-#: the cached version of the library.  We get the distribution from
-#: pkg_resources the first time this attribute is accessed.  Because
-#: this operation is quite slow it speeds up importing a lot.
-version = None
-
 class module(ModuleType):
     """Automatically import objects from the modules."""
 
@@ -128,17 +132,6 @@
                        '__package__', '__version__'))
         return result
 
-    @property
-    def __version__(self):
-        global version
-        if version is None:
-            try:
-                version = __import__('pkg_resources') \
-                          .get_distribution('Werkzeug').version
-            except:
-                version = 'unknown'
-        return version
-
 # keep a reference to this module so that it's not garbage collected
 old_module = sys.modules['werkzeug']
 
@@ -147,8 +140,10 @@
 new_module = sys.modules['werkzeug'] = module('werkzeug')
 new_module.__dict__.update({
     '__file__':         __file__,
+    '__package__':      'werkzeug',
     '__path__':         __path__,
     '__doc__':          __doc__,
+    '__version__':      __version__,
     '__all__':          tuple(object_origins) + tuple(attribute_modules),
     '__docformat__':    'restructuredtext en'
 })
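
After the module swap above, the names listed in all_by_module keep resolving lazily from their new homes, and the version is now a plain attribute; a short hedged sketch (targets Werkzeug 0.8 as vendored here):

    import werkzeug

    werkzeug.__version__                  # '0.8.1', set at import time, no pkg_resources lookup
    from werkzeug import url_decode       # transparently pulled from werkzeug.urls
    from werkzeug import secure_filename  # ... and from werkzeug.utils
    url_decode('a=1&b=2')['b']            # u'2'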
--- a/MoinMoin/support/werkzeug/_internal.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/_internal.py	Sun Dec 25 16:43:04 2011 +0100
@@ -5,20 +5,21 @@
 
     This module provides internally used helpers and constants.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 import inspect
 from weakref import WeakKeyDictionary
 from cStringIO import StringIO
-from Cookie import BaseCookie, Morsel, CookieError
+from Cookie import SimpleCookie, Morsel, CookieError
 from time import gmtime
-from datetime import datetime
+from datetime import datetime, date
 
 
 _logger = None
 _empty_stream = StringIO('')
 _signature_cache = WeakKeyDictionary()
+_epoch_ord = date(1970, 1, 1).toordinal()
 
 
 HTTP_STATUS_CODES = {
@@ -64,7 +65,7 @@
     423:    'Locked',
     424:    'Failed Dependency',
     426:    'Upgrade Required',
-    449:    'Retry With',           # propritary MS extension
+    449:    'Retry With',           # proprietary MS extension
     500:    'Internal Server Error',
     501:    'Not Implemented',
     502:    'Bad Gateway',
@@ -93,13 +94,22 @@
     return proxy_repr
 
 
+def _get_environ(obj):
+    env = getattr(obj, 'environ', obj)
+    assert isinstance(env, dict), \
+        '%r is not a WSGI environment (has to be a dict)' % type(obj).__name__
+    return env
+
+
 def _log(type, message, *args, **kwargs):
     """Log into the internal werkzeug logger."""
     global _logger
     if _logger is None:
         import logging
         _logger = logging.getLogger('werkzeug')
-        if _logger.level == logging.NOTSET:
+        # Only set up a default log handler if the
+        # end-user application didn't set anything up.
+        if not logging.root.handlers and _logger.level == logging.NOTSET:
             _logger.setLevel(logging.INFO)
             handler = logging.StreamHandler()
             _logger.addHandler(handler)
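
The new check means an application that configures logging itself is left alone; a hedged sketch of the intended interaction, using only the private helper shown above:

    import logging
    logging.basicConfig(level=logging.INFO)   # application sets up logging first

    from werkzeug._internal import _log
    _log('info', 'request for %s finished', '/index')
    # with root handlers already present, werkzeug reuses them instead of
    # attaching its own StreamHandler to the 'werkzeug' logger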
@@ -178,7 +188,7 @@
         new.__module__ = old.__module__
         new.__doc__ = old.__doc__
         new.__dict__ = old.__dict__
-    except:
+    except Exception:
         pass
     return new
 
@@ -194,7 +204,7 @@
         return value.decode(charset, errors)
     except UnicodeError, e:
         if fallback is not None:
-            return value.decode(fallback, 'ignore')
+            return value.decode(fallback, 'replace')
         from werkzeug.exceptions import HTTPUnicodeError
         raise HTTPUnicodeError(str(e))
 
@@ -237,19 +247,20 @@
     )
 
 
-_timegm = None
 def _date_to_unix(arg):
     """Converts a timetuple, integer or datetime object into the seconds from
     epoch in utc.
     """
-    global _timegm
     if isinstance(arg, datetime):
         arg = arg.utctimetuple()
     elif isinstance(arg, (int, long, float)):
         return int(arg)
-    if _timegm is None:
-        from calendar import timegm as _timegm
-    return _timegm(arg)
+    year, month, day, hour, minute, second = arg[:6]
+    days = date(year, month, 1).toordinal() - _epoch_ord + day - 1
+    hours = days * 24 + hour
+    minutes = hours * 60 + minute
+    seconds = minutes * 60 + second
+    return seconds
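
The rewritten helper computes the epoch offset from date ordinals instead of importing calendar.timegm lazily; a hedged sanity check against timegm (relies on the private helper exactly as defined above):

    from calendar import timegm
    from datetime import datetime
    from werkzeug._internal import _date_to_unix

    dt = datetime(2011, 12, 25, 16, 43, 4)
    assert _date_to_unix(dt) == timegm(dt.utctimetuple())
    assert _date_to_unix(1324831384) == 1324831384   # ints pass through unchanged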
 
 
 class _ExtendedMorsel(Morsel):
@@ -269,7 +280,7 @@
         return result
 
 
-class _ExtendedCookie(BaseCookie):
+class _ExtendedCookie(SimpleCookie):
     """Form of the base cookie that doesn't raise a `CookieError` for
     malformed keys.  This has the advantage that broken cookies submitted
     by nonstandard browsers don't cause the cookie to be empty.
@@ -372,8 +383,11 @@
         if environ.get('QUERY_STRING') != 'macgybarchakku':
             return app(environ, injecting_start_response)
         injecting_start_response('200 OK', [('Content-Type', 'text/html')])
-        return ['''<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">
-<title>About Werkzeug</>
+        return ['''
+<!DOCTYPE html>
+<html>
+<head>
+<title>About Werkzeug</title>
 <style type="text/css">
   body { font: 15px Georgia, serif; text-align: center; }
   a { color: #333; text-decoration: none; }
@@ -381,7 +395,11 @@
   p { margin: 0 0 30px 0; }
   pre { font: 11px 'Consolas', 'Monaco', monospace; line-height: 0.95; }
 </style>
+</head>
+<body>
 <h1><a href="http://werkzeug.pocoo.org/">Werkzeug</a></h1>
-<p>the Swiss Army knife of Python web development.
-<pre>%s\n\n\n</>''' % gyver]
+<p>the Swiss Army knife of Python web development.</p>
+<pre>%s\n\n\n</pre>
+</body>
+</html>''' % gyver]
     return easteregged
--- a/MoinMoin/support/werkzeug/contrib/__init__.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/__init__.py	Sun Dec 25 16:43:04 2011 +0100
@@ -11,6 +11,6 @@
     This file itself is mostly for informational purposes and to tell the
     Python interpreter that `contrib` is a package.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
--- a/MoinMoin/support/werkzeug/contrib/atom.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/atom.py	Sun Dec 25 16:43:04 2011 +0100
@@ -18,7 +18,7 @@
                          updated=post.last_update, published=post.pub_date)
             return feed.get_response()
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 from datetime import datetime
@@ -152,7 +152,7 @@
         # atom demands either an author element in every entry or a global one
         if not self.author:
             if False in map(lambda e: bool(e.author), self.entries):
-                self.author = ({'name': u'unbekannter Autor'},)
+                self.author = ({'name': 'Unknown author'},)
 
         if not self.updated:
             dates = sorted([entry.updated for entry in self.entries])
--- a/MoinMoin/support/werkzeug/contrib/cache.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/cache.py	Sun Dec 25 16:43:04 2011 +0100
@@ -19,11 +19,11 @@
     Caching is pretty simple.  Basically you have a cache object lurking around
     somewhere that is connected to a remote cache or the file system or
     something else.  When the request comes in you check if the current page
-    is already in the cache and if, you're returning it.  Otherwise you generate
-    the page and put it into the cache.  (Or a fragment of the page, you don't
-    have to cache the full thing)
+    is already in the cache and if so, you're returning it from the cache.
+    Otherwise you generate the page and put it into the cache. (Or a fragment
+    of the page, you don't have to cache the full thing)
 
-    Here a simple example of how to cache a sidebar for a template::
+    Here is a simple example of how to cache a sidebar for a template::
 
         def get_sidebar(user):
             identifier = 'sidebar_for/user%d' % user.id
@@ -38,7 +38,7 @@
     =======================
 
     To create a cache object you just import the cache system of your choice
-    from the cache module and instanciate it.  Then you can start working
+    from the cache module and instantiate it.  Then you can start working
     with that object:
 
     >>> from werkzeug.contrib.cache import SimpleCache
@@ -50,21 +50,42 @@
     True
 
     Please keep in mind that you have to create the cache and put it somewhere
-    you have access to it (either as a module global you can import or if you
-    put it onto your WSGI application).
+    you have access to it (either as a module global you can import or you just
+    put it into your WSGI application).
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 import os
 import re
+import tempfile
 try:
     from hashlib import md5
 except ImportError:
     from md5 import new as md5
 from itertools import izip
 from time import time
-from cPickle import loads, dumps, load, dump, HIGHEST_PROTOCOL
+from werkzeug.posixemulation import rename
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+
+def _items(mappingorseq):
+    """Wrapper for efficient iteration over mappings represented by dicts
+    or sequences::
+
+        >>> for k, v in _items((i, i*i) for i in xrange(5)):
+        ...    assert k*k == v
+
+        >>> for k, v in _items(dict((i, i*i) for i in xrange(5))):
+        ...    assert k*k == v
+
+    """
+    return mappingorseq.iteritems() if hasattr(mappingorseq, 'iteritems') \
+        else mappingorseq
 
 
 class BaseCache(object):
@@ -79,8 +100,8 @@
         self.default_timeout = default_timeout
 
     def get(self, key):
-        """Looks up key in the cache and returns it.  If the key does not
-        exist `None` is returned instead.
+        """Looks up key in the cache and returns the value for it.
+        If the key does not exist `None` is returned instead.
 
         :param key: the key to be looked up.
         """
@@ -95,8 +116,8 @@
         pass
 
     def get_many(self, *keys):
-        """Returns a list of keys.  For each key a item in the list is
-        created.  Example::
+        """Returns a list of values for the given keys.
+        For each key an item in the list is created.  Example::
 
             foo, bar = cache.get_many("foo", "bar")
 
@@ -121,18 +142,19 @@
         return dict(izip(keys, self.get_many(*keys)))
 
     def set(self, key, value, timeout=None):
-        """Adds or overrides a key in the cache.
+        """Adds a new key/value to the cache (overwrites value, if key already
+        exists in the cache).
 
         :param key: the key to set
         :param value: the value for the key
-        :param timeout: the cache timeout for the key or the default
-                        timeout if not specified.
+        :param timeout: the cache timeout for the key (if not specified,
+                        it uses the default timeout).
         """
         pass
 
     def add(self, key, value, timeout=None):
-        """Works like :meth:`set` but does not override already existing
-        values.
+        """Works like :meth:`set` but does not overwrite the values of already
+        existing keys.
 
         :param key: the key to set
         :param value: the value for the key
@@ -142,13 +164,13 @@
         pass
 
     def set_many(self, mapping, timeout=None):
-        """Sets multiple keys and values from a dict.
+        """Sets multiple keys and values from a mapping.
 
-        :param mapping: a dict with the values to set.
-        :param timeout: the cache timeout for the key or the default
-                        timeout if not specified.
+        :param mapping: a mapping with the keys/values to set.
+        :param timeout: the cache timeout for the key (if not specified,
+                        it uses the default timeout).
         """
-        for key, value in mapping.iteritems():
+        for key, value in _items(mapping):
             self.set(key, value, timeout)
 
     def delete_many(self, *keys):
@@ -162,7 +184,7 @@
 
     def clear(self):
         """Clears the cache.  Keep in mind that not all caches support
-        clearning of the full cache.
+        completely clearing the cache.
         """
         pass
 
@@ -226,20 +248,22 @@
         now = time()
         expires, value = self._cache.get(key, (0, None))
         if expires > time():
-            return loads(value)
+            return pickle.loads(value)
 
     def set(self, key, value, timeout=None):
         if timeout is None:
             timeout = self.default_timeout
         self._prune()
-        self._cache[key] = (time() + timeout, dumps(value, HIGHEST_PROTOCOL))
+        self._cache[key] = (time() + timeout, pickle.dumps(value,
+            pickle.HIGHEST_PROTOCOL))
 
     def add(self, key, value, timeout=None):
         if timeout is None:
             timeout = self.default_timeout
         if len(self._cache) > self._threshold:
             self._prune()
-        item = (time() + timeout, dumps(value, HIGHEST_PROTOCOL))
+        item = (time() + timeout, pickle.dumps(value,
+            pickle.HIGHEST_PROTOCOL))
         self._cache.setdefault(key, item)
 
     def delete(self, key):
@@ -251,9 +275,10 @@
 class MemcachedCache(BaseCache):
     """A cache that uses memcached as backend.
 
-    The first argument can either be a list or tuple of server addresses
-    in which case Werkzeug tries to import the memcache module and connect
-    to it, or an object that resembles the API of a :class:`memcache.Client`.
+    The first argument can either be an object that resembles the API of a
+    :class:`memcache.Client` or a tuple/list of server addresses. In the
+    event that a tuple/list is passed, Werkzeug tries to import the best
+    available memcache library.
 
     Implementation notes:  This cache backend works around some limitations in
     memcached to simplify the interface.  For example unicode keys are encoded
@@ -273,33 +298,19 @@
                        different prefix.
     """
 
-    def __init__(self, servers, default_timeout=300, key_prefix=None):
+    def __init__(self, servers=None, default_timeout=300, key_prefix=None):
         BaseCache.__init__(self, default_timeout)
-        if isinstance(servers, (list, tuple)):
-            try:
-                import cmemcache as memcache
-                is_cmemcache = True
-            except ImportError:
-                try:
-                    import memcache
-                    is_cmemcache = False
-                except ImportError:
-                    raise RuntimeError('no memcache module found')
+        if servers is None or isinstance(servers, (list, tuple)):
+            if servers is None:
+                servers = ['127.0.0.1:11211']
+            self._client = self.import_preferred_memcache_lib(servers)
+            if self._client is None:
+                raise RuntimeError('no memcache module found')
+        else:
+            # NOTE: servers is actually an already initialized memcache
+            # client.
+            self._client = servers
 
-            # cmemcache has a bug that debuglog is not defined for the
-            # client.  Whenever pickle fails you get a weird AttributError.
-            if is_cmemcache:
-                client = memcache.Client(map(str, servers))
-                try:
-                    client.debuglog = lambda *a: None
-                except:
-                    pass
-            else:
-                client = memcache.Client(servers, False, HIGHEST_PROTOCOL)
-        else:
-            client = servers
-
-        self._client = client
         self.key_prefix = key_prefix
 
     def get(self, key):
@@ -316,7 +327,7 @@
     def get_dict(self, *keys):
         key_mapping = {}
         have_encoded_keys = False
-        for idx, key in enumerate(keys):
+        for key in keys:
             if isinstance(key, unicode):
                 encoded_key = key.encode('utf-8')
                 have_encoded_keys = True
@@ -326,9 +337,6 @@
                 encoded_key = self.key_prefix + encoded_key
             if _test_memcached_key(key):
                 key_mapping[encoded_key] = key
-        # the keys call here is important because otherwise cmemcache
-        # does ugly things.  What exaclty I don't know, i think it does
-        # Py_DECREF but quite frankly i don't care.
         d = rv = self._client.get_multi(key_mapping.keys())
         if have_encoded_keys or self.key_prefix:
             rv = {}
@@ -366,7 +374,7 @@
         if timeout is None:
             timeout = self.default_timeout
         new_mapping = {}
-        for key, value in mapping.iteritems():
+        for key, value in _items(mapping):
             if isinstance(key, unicode):
                 key = key.encode('utf-8')
             if self.key_prefix:
@@ -410,58 +418,210 @@
             key = self.key_prefix + key
         self._client.decr(key, delta)
 
+    def import_preferred_memcache_lib(self, servers):
+        """Returns an initialized memcache client.  Used by the constructor."""
+        try:
+            import pylibmc
+        except ImportError:
+            pass
+        else:
+            return pylibmc.Client(servers)
 
-class GAEMemcachedCache(MemcachedCache):
-    """Connects to the Google appengine memcached Cache.
+        try:
+            from google.appengine.api import memcache
+        except ImportError:
+            pass
+        else:
+            return memcache.Client()
 
+        try:
+            import memcache
+        except ImportError:
+            pass
+        else:
+            return memcache.Client(servers)
+
+
+# backwards compatibility
+GAEMemcachedCache = MemcachedCache
+
+
+class RedisCache(BaseCache):
+    """Uses the Redis key-value store as a cache backend.
+
+    The first argument can be either a string denoting the address of the
+    Redis server or an object resembling an instance of the redis.Redis class.
+
+    Note: The Python Redis API already takes care of encoding unicode strings
+    on the fly.
+
+    .. versionadded:: 0.7
+
+    .. versionadded:: 0.8
+       `key_prefix` was added.
+
+    .. versionchanged:: 0.8
+       This cache backend now properly serializes objects.
+
+    :param host: address of the Redis server or an object whose API is
+                 compatible with the official Python Redis client (redis-py).
+    :param port: port number on which the Redis server listens for connections
     :param default_timeout: the default timeout that is used if no timeout is
                             specified on :meth:`~BaseCache.set`.
-    :param key_prefix: a prefix that is added before all keys.  This makes it
-                       possible to use the same memcached server for different
-                       applications.  Keep in mind that
-                       :meth:`~BaseCache.clear` will also clear keys with a
-                       different prefix.
+    :param key_prefix: A prefix that should be added to all keys.
     """
 
-    def __init__(self, default_timeout=300, key_prefix=None):
-        from google.appengine.api import memcache
-        MemcachedCache.__init__(self, memcache.Client(),
-                                default_timeout, key_prefix)
+    def __init__(self, host='localhost', port=6379, default_timeout=300,
+                 key_prefix=None):
+        BaseCache.__init__(self, default_timeout)
+        if isinstance(host, basestring):
+            try:
+                import redis
+            except ImportError:
+                raise RuntimeError('no redis module found')
+            self._client = redis.Redis(host=host, port=port)
+        else:
+            self._client = host
+        self.key_prefix = key_prefix or ''
+
+    def dump_object(self, value):
+        """Dumps an object into a string for redis.  By default it serializes
+        integers as regular string and pickle dumps everything else.
+        """
+        if isinstance(value, (int, long)):
+            return str(value)
+        return '!' + pickle.dumps(value)
+
+    def load_object(self, value):
+        """The reversal of :meth:`dump_object`.  This might be callde with
+        None.
+        """
+        if value is None:
+            return None
+        if value.startswith('!'):
+            return pickle.loads(value[1:])
+        try:
+            return int(value)
+        except ValueError:
+            # before 0.8 we did not have serialization.  Still support that.
+            return value
+
+    def get(self, key):
+        return self.load_object(self._client.get(self.key_prefix + key))
+
+    def get_many(self, *keys):
+        if self.key_prefix:
+            keys = [self.key_prefix + key for key in keys]
+        return [self.load_object(x) for x in self._client.mget(keys)]
+
+    def set(self, key, value, timeout=None):
+        if timeout is None:
+            timeout = self.default_timeout
+        dump = self.dump_object(value)
+        self._client.setex(self.key_prefix + key, dump, timeout)
+
+    def add(self, key, value, timeout=None):
+        if timeout is None:
+            timeout = self.default_timeout
+        dump = self.dump_object(value)
+        added = self._client.setnx(self.key_prefix + key, dump)
+        if added:
+            self._client.expire(self.key_prefix + key, timeout)
+
+    def set_many(self, mapping, timeout=None):
+        if timeout is None:
+            timeout = self.default_timeout
+        pipe = self._client.pipeline()
+        for key, value in _items(mapping):
+            dump = self.dump_object(value)
+            pipe.setex(self.key_prefix + key, dump, timeout)
+        pipe.execute()
+
+    def delete(self, key):
+        self._client.delete(self.key_prefix + key)
+
+    def delete_many(self, *keys):
+        if not keys:
+            return
+        if self.key_prefix:
+            keys = [self.key_prefix + key for key in keys]
+        self._client.delete(*keys)
+
+    def clear(self):
+        if self.key_prefix:
+            keys = self._client.keys(self.key_prefix + '*')
+            if keys:
+                self._client.delete(*keys)
+        else:
+            self._client.flushdb()
+
+    def inc(self, key, delta=1):
+        return self._client.incr(self.key_prefix + key, delta)
+
+    def dec(self, key, delta=1):
+        return self._client.decr(self.key_prefix + key, delta)
 
 
 class FileSystemCache(BaseCache):
     """A cache that stores the items on the file system.  This cache depends
     on being the only user of the `cache_dir`.  Make absolutely sure that
-    nobody but this cache stores files there or otherwise the chace will
-    randomely delete files therein.
+    nobody but this cache stores files there or otherwise the cache will
+    randomly delete files therein.
 
-    :param cache_dir: the directory where cached files are stored.
+    :param cache_dir: the directory where cache files are stored.
     :param threshold: the maximum number of items the cache stores before
                       it starts deleting some.
     :param default_timeout: the default timeout that is used if no timeout is
                             specified on :meth:`~BaseCache.set`.
+    :param mode: the file mode wanted for the cache files, default 0600
     """
 
-    def __init__(self, cache_dir, threshold=500, default_timeout=300):
+    #: used for temporary files by the FileSystemCache
+    _fs_transaction_suffix = '.__wz_cache'
+
+    def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0600):
         BaseCache.__init__(self, default_timeout)
         self._path = cache_dir
         self._threshold = threshold
+        self._mode = mode
         if not os.path.exists(self._path):
             os.makedirs(self._path)
 
+    def _list_dir(self):
+        """return a list of (fully qualified) cache filenames
+        """
+        return [os.path.join(self._path, fn) for fn in os.listdir(self._path)
+                if not fn.endswith(self._fs_transaction_suffix)]
+
     def _prune(self):
-        entries = os.listdir(self._path)
+        entries = self._list_dir()
         if len(entries) > self._threshold:
             now = time()
-            for idx, key in enumerate(entries):
+            for idx, fname in enumerate(entries):
+                remove = False
+                f = None
                 try:
-                    f = file(self._get_filename(key))
-                    if load(f) > now and idx % 3 != 0:
-                        f.close()
-                        continue
-                except:
-                    f.close()
-                self.delete(key)
+                    try:
+                        f = open(fname, 'rb')
+                        expires = pickle.load(f)
+                        remove = expires <= now or idx % 3 == 0
+                    finally:
+                        if f is not None:
+                            f.close()
+                except Exception:
+                    pass
+                if remove:
+                    try:
+                        os.remove(fname)
+                    except (IOError, OSError):
+                        pass
+
+    def clear(self):
+        for fname in self._list_dir():
+            try:
+                os.remove(fname)
+            except (IOError, OSError):
+                pass
 
     def _get_filename(self, key):
         hash = md5(key).hexdigest()
@@ -470,14 +630,14 @@
     def get(self, key):
         filename = self._get_filename(key)
         try:
-            f = file(filename, 'rb')
+            f = open(filename, 'rb')
             try:
-                if load(f) >= time():
-                    return load(f)
+                if pickle.load(f) >= time():
+                    return pickle.load(f)
             finally:
                 f.close()
             os.remove(filename)
-        except:
+        except Exception:
             return None
 
     def add(self, key, value, timeout=None):
@@ -491,12 +651,16 @@
         filename = self._get_filename(key)
         self._prune()
         try:
-            f = file(filename, 'wb')
+            fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
+                                       dir=self._path)
+            f = os.fdopen(fd, 'wb')
             try:
-                dump(int(time() + timeout), f, 1)
-                dump(value, f, HIGHEST_PROTOCOL)
+                pickle.dump(int(time() + timeout), f, 1)
+                pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
             finally:
                 f.close()
+            rename(tmp, filename)
+            os.chmod(filename, self._mode)
         except (IOError, OSError):
             pass
 
@@ -505,7 +669,3 @@
             os.remove(self._get_filename(key))
         except (IOError, OSError):
             pass
-
-    def clear(self):
-        for key in os.listdir(self._path):
-            self.delete(key)
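
A minimal usage sketch for the reworked FileSystemCache (the directory below is only an example); entries are now written to a temporary file and moved into place with the imported `rename`, so a concurrent reader never sees a half-written pickle::

    from werkzeug.contrib.cache import FileSystemCache

    cache = FileSystemCache('/tmp/example-cache', threshold=500,
                            default_timeout=300, mode=0600)
    cache.set('answer', {'value': 42})      # pickled, written atomically
    assert cache.get('answer') == {'value': 42}
    cache.delete('answer')
    assert cache.get('answer') is None
    cache.clear()                           # removes the remaining cache files
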
--- a/MoinMoin/support/werkzeug/contrib/fixers.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/fixers.py	Sun Dec 25 16:43:04 2011 +0100
@@ -18,7 +18,7 @@
 """
 from urllib import unquote
 from werkzeug.http import parse_options_header, parse_cache_control_header, \
-     parse_set_header, dump_header
+     parse_set_header
 from werkzeug.useragents import UserAgent
 from werkzeug.datastructures import Headers, ResponseCacheControl
 
@@ -34,8 +34,15 @@
         self.app = app
 
     def __call__(self, environ, start_response):
-        environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \
-                               environ.get('PATH_INFO', '')
+        # only set PATH_INFO for older versions of Lighty or if no
+        # server software is provided.  That's because the test was
+        # added in newer Werkzeug versions and we don't want to break
+        # people's code if they are using this fixer in a test that
+        # does not set the SERVER_SOFTWARE key.
+        if 'SERVER_SOFTWARE' not in environ or \
+           environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28':
+            environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \
+                                   environ.get('PATH_INFO', '')
         environ['SCRIPT_NAME'] = ''
         return self.app(environ, start_response)
 
@@ -76,9 +83,6 @@
     application that was not designed with HTTP proxies in mind.  It
     sets `REMOTE_ADDR`, `HTTP_HOST` from `X-Forwarded` headers.
 
-    Werkzeug wrappers have builtin support for this by setting the
-    :attr:`~werkzeug.BaseRequest.is_behind_proxy` attribute to `True`.
-
     Do not use this middleware in non-proxy setups for security reasons.
 
     The original values of `REMOTE_ADDR` and `HTTP_HOST` are stored in
@@ -91,18 +95,33 @@
     def __init__(self, app):
         self.app = app
 
+    def get_remote_addr(self, forwarded_for):
+        """Selects the new remote addr from the given list of ips in
+        X-Forwarded-For.  By default the first one is picked.
+
+        .. versionadded:: 0.8
+        """
+        if forwarded_for:
+            return forwarded_for[0]
+
     def __call__(self, environ, start_response):
         getter = environ.get
+        forwarded_proto = getter('HTTP_X_FORWARDED_PROTO', '')
         forwarded_for = getter('HTTP_X_FORWARDED_FOR', '').split(',')
         forwarded_host = getter('HTTP_X_FORWARDED_HOST', '')
         environ.update({
-            'werkzeug.proxy_fix.orig_remote_addr':  getter('REMOTE_ADDR'),
-            'werkzeug.proxy_fix.orig_http_host':    getter('HTTP_HOST')
+            'werkzeug.proxy_fix.orig_wsgi_url_scheme':  getter('wsgi.url_scheme'),
+            'werkzeug.proxy_fix.orig_remote_addr':      getter('REMOTE_ADDR'),
+            'werkzeug.proxy_fix.orig_http_host':        getter('HTTP_HOST')
         })
-        if forwarded_for:
-            environ['REMOTE_ADDR'] = forwarded_for[0].strip()
+        forwarded_for = [x for x in [x.strip() for x in forwarded_for] if x]
+        remote_addr = self.get_remote_addr(forwarded_for)
+        if remote_addr is not None:
+            environ['REMOTE_ADDR'] = remote_addr
         if forwarded_host:
             environ['HTTP_HOST'] = forwarded_host
+        if forwarded_proto:
+            environ['wsgi.url_scheme'] = forwarded_proto
         return self.app(environ, start_response)
 
 
--- a/MoinMoin/support/werkzeug/contrib/iterio.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/iterio.py	Sun Dec 25 16:43:04 2011 +0100
@@ -36,12 +36,12 @@
 
     .. _greenlet: http://codespeak.net/py/dist/greenlet.html
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 try:
-    from py.magic import greenlet
-except:
+    import greenlet
+except ImportError:
     greenlet = None
 
 
@@ -127,25 +127,22 @@
         if greenlet is None:
             raise RuntimeError('IterI requires greenlet support')
         stream = object.__new__(cls)
-        stream.__init__(greenlet.getcurrent())
+        stream._parent = greenlet.getcurrent()
+        stream._buffer = []
+        stream.closed = False
+        stream.pos = 0
 
         def run():
             func(stream)
             stream.flush()
 
-        g = greenlet(run, stream._parent)
+        g = greenlet.greenlet(run, stream._parent)
         while 1:
             rv = g.switch()
             if not rv:
                 return
             yield rv[0]
 
-    def __init__(self, parent):
-        self._parent = parent
-        self._buffer = []
-        self.closed = False
-        self.pos = 0
-
     def close(self):
         if not self.closed:
             self.closed = True
@@ -171,13 +168,12 @@
     """Iter output.  Wrap an iterator and give it a stream like interface."""
 
     def __new__(cls, gen):
-        return object.__new__(cls)
-
-    def __init__(self, gen):
+        self = object.__new__(cls)
         self._gen = gen
         self._buf = ''
         self.closed = False
         self.pos = 0
+        return self
 
     def __iter__(self):
         return self
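
The output half of the module needs no greenlet support at all; a small sketch of the stream-like interface `IterO` (rewritten above) provides over a plain iterator::

    from werkzeug.contrib.iterio import IterO

    stream = IterO(iter(['Hello ', 'World', '!']))
    assert stream.read(5) == 'Hello'        # buffers across chunk boundaries
    assert stream.read() == ' World!'       # drains the rest of the iterator
    stream.close()
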
--- a/MoinMoin/support/werkzeug/contrib/jsrouting.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/jsrouting.py	Sun Dec 25 16:43:04 2011 +0100
@@ -6,29 +6,31 @@
     Addon module that allows to create a JavaScript function from a map
     that generates rules.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 try:
     from simplejson import dumps
 except ImportError:
-    def dumps(*args):
-        raise RuntimeError('simplejson required for jsrouting')
+    try:
+        from json import dumps
+    except ImportError:
+        def dumps(*args):
+            raise RuntimeError('simplejson required for jsrouting')
 
 from inspect import getmro
-from werkzeug.templates import Template
 from werkzeug.routing import NumberConverter
 
 
-_javascript_routing_template = Template(u'''\
-<% if name_parts %>\
-<% for idx in xrange(0, len(name_parts) - 1) %>\
-if (typeof ${'.'.join(name_parts[:idx + 1])} === 'undefined') \
-${'.'.join(name_parts[:idx + 1])} = {};
-<% endfor %>\
-${'.'.join(name_parts)} = <% endif %>\
-(function (server_name, script_name, subdomain, url_scheme) {
-    var converters = ${', '.join(converters)};
+def render_template(name_parts, rules, converters):
+    result = u''
+    if name_parts:
+        for idx in xrange(0, len(name_parts) - 1):
+            name = u'.'.join(name_parts[:idx + 1])
+            result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name)
+        result += '%s = ' % '.'.join(name_parts)
+    result += """(function (server_name, script_name, subdomain, url_scheme) {
+    var converters = %(converters)s;
     var rules = $rules;
     function in_array(array, value) {
         if (array.indexOf != undefined) {
@@ -160,7 +162,8 @@
                    + '/' + lstrip(rv.path, '/');
         }
     };
-})''')
+})""" % {'converters': u', '.join(converters)}
+    return result
 
 
 def generate_map(map, name='url_map'):
@@ -203,11 +206,9 @@
             u'defaults':    rule.defaults
         })
 
-    return _javascript_routing_template.render({
-        'name_parts':   name and name.split('.') or [],
-        'rules':        dumps(rules),
-        'converters':   converters
-    })
+    return render_template(name_parts=name and name.split('.') or [],
+                           rules=dumps(rules),
+                           converters=converters)
 
 
 def generate_adapter(adapter, name='url_for', map_name='url_map'):
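
With the Template dependency gone, `generate_map` still accepts an ordinary routing map and returns JavaScript source text; a short sketch (the rules are placeholders)::

    from werkzeug.routing import Map, Rule
    from werkzeug.contrib.jsrouting import generate_map

    url_map = Map([
        Rule('/', endpoint='index'),
        Rule('/page/<int:page_id>', endpoint='page'),
    ])
    js_source = generate_map(url_map, name='url_map')
    # js_source is plain JavaScript text; serve or inline it so client-side
    # code can build URLs from the same rules.
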
--- a/MoinMoin/support/werkzeug/contrib/kickstart.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/kickstart.py	Sun Dec 25 16:43:04 2011 +0100
@@ -22,7 +22,7 @@
     .. _Genshi: http://genshi.edgewall.org/
     .. _Django: http://www.djangoproject.com/
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 from os import path
@@ -34,6 +34,10 @@
 __all__ = ['Request', 'Response', 'TemplateNotFound', 'TemplateLoader',
            'GenshiTemplateLoader', 'Application']
 
+from warnings import warn
+warn(DeprecationWarning('werkzeug.contrib.kickstart is deprecated and '
+                        'will be removed in Werkzeug 1.0'))
+
 
 class Request(RequestBase):
     """A handy subclass of the base request that adds a URL builder.
--- a/MoinMoin/support/werkzeug/contrib/limiter.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/limiter.py	Sun Dec 25 16:43:04 2011 +0100
@@ -9,30 +9,12 @@
     .. _Trac: http://trac.edgewall.org/
     .. _Django: http://www.djangoproject.com/
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 from warnings import warn
-from werkzeug.utils import LimitedStream as LimitedStreamBase
-
-
-class _SilentLimitedStream(LimitedStreamBase):
 
-    def __init__(self, environ, limit):
-        LimitedStreamBase.__init__(self,
-            environ['wsgi.input'],
-            min(limit, int(environ.get('CONTENT_LENGTH') or 0)),
-            silent=True
-        )
-
-
-class LimitedStream(_SilentLimitedStream):
-
-    def __init__(self, environ, limit):
-        _SilentLimitedStream.__init__(self, environ, limit)
-        warn(DeprecationWarning('contrib limited stream is deprecated, use '
-                                'werkzeug.LimitedStream instead.'),
-             stacklevel=2)
+from werkzeug.wsgi import LimitedStream
 
 
 class StreamLimitMiddleware(object):
@@ -49,5 +31,6 @@
         self.maximum_size = maximum_size
 
     def __call__(self, environ, start_response):
-        environ['wsgi.input'] = _SilentLimitedStream(environ, self.maximum_size)
+        limit = min(self.maximum_size, int(environ.get('CONTENT_LENGTH') or 0))
+        environ['wsgi.input'] = LimitedStream(environ['wsgi.input'], limit)
         return self.app(environ, start_response)
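
A sketch of the slimmed-down middleware in use, capping request bodies at 10 MiB (the limit is just an example value)::

    from werkzeug.contrib.limiter import StreamLimitMiddleware

    def application(environ, start_response):
        body = environ['wsgi.input'].read()   # never returns more than the limit
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['received %d bytes' % len(body)]

    application = StreamLimitMiddleware(application,
                                        maximum_size=10 * 1024 * 1024)
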
--- a/MoinMoin/support/werkzeug/contrib/lint.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/lint.py	Sun Dec 25 16:43:04 2011 +0100
@@ -16,14 +16,15 @@
 
     It's strongly recommended to use it during development.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 from urlparse import urlparse
 from warnings import warn
+
 from werkzeug.datastructures import Headers
-from werkzeug.utils import FileWrapper
 from werkzeug.http import is_entity_header
+from werkzeug.wsgi import FileWrapper
 
 
 class WSGIWarning(Warning):
@@ -177,7 +178,7 @@
             try:
                 warn(WSGIWarning('Iterator was garbage collected before '
                                  'it was closed.'))
-            except:
+            except Exception:
                 pass
 
 
--- a/MoinMoin/support/werkzeug/contrib/profiler.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/profiler.py	Sun Dec 25 16:43:04 2011 +0100
@@ -13,7 +13,7 @@
         from werkzeug.contrib.profiler import ProfilerMiddleware
         app = ProfilerMiddleware(app)
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 import sys
@@ -87,7 +87,7 @@
         p = Profile()
         p.runcall(runapp)
         body = ''.join(response_body)
-        stats = Stats(p)
+        stats = Stats(p, stream=self._stream)
         stats.sort_stats(*self._sort_by)
 
         self._stream.write('-' * 80)
@@ -102,7 +102,9 @@
                 threaded=False, processes=1, stream=None,
                 sort_by=('time', 'calls'), restrictions=()):
     """Return a new callback for :mod:`werkzeug.script` that starts a local
-    server with the profiler enabled::
+    server with the profiler enabled.
+
+    ::
 
         from werkzeug.contrib import profiler
         action_profile = profiler.make_action(make_app)
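
The `Stats(p, stream=self._stream)` fix above matters once a custom stream is passed; a short sketch routing profiler output to a file instead of stderr (the path is only an example)::

    from werkzeug.contrib.profiler import ProfilerMiddleware

    def application(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['hello']

    profile_log = open('/tmp/profile.log', 'w')
    application = ProfilerMiddleware(application, stream=profile_log,
                                     sort_by=('cumulative',), restrictions=(30,))
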
--- a/MoinMoin/support/werkzeug/contrib/reporterstream.py	Sun Dec 25 16:38:04 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,92 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    werkzeug.contrib.reporterstream
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This module implements a class that can wrap `wsgi.input` in order to be
-    informed about changes of the stream.  This is useful if you want to
-    display a progress bar for the upload.
-
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
-    :license: BSD, see LICENSE for more details.
-"""
-from warnings import warn
-warn(DeprecationWarning('reporter stream is deprecated.  If you want to continue '
-                        'using this class copy the module code from the '
-                        'werkzeug wiki: http://dev.pocoo.org/projects/werkzeug/'
-                        'wiki/ReporterStream'), stacklevel=2)
-
-
-class BaseReporterStream(object):
-    """
-    This class can be used to wrap `wsgi.input` in order to be informed about
-    changes of the stream.
-
-    Usage::
-
-        from random import randrange
-
-        class ReporterStream(BaseReporterStream):
-
-            def __init__(self, environ):
-                super(ReporterStream, self).__init__(environ, 1024 * 16)
-                self.transport_id = randrange(0, 100000)
-
-            def processed(self):
-                s = self.environ['my.session.service']
-                s.store['upload/%s' % self.transport_id] = (self.pos, self.length)
-                s.flush()
-
-
-    And before accessing `request.form` or similar attributes add the stream:
-
-        stream = ReporterStream(environ)
-        environ['wsgi.input'] = stream
-    """
-
-    def __init__(self, environ, threshold):
-        self.threshold = threshold
-        self.length = int(environ.get('CONTENT_LENGTH') or 0)
-        self.pos = 0
-        self.environ = environ
-        self._stream = environ['wsgi.input']
-
-    def processed(self):
-        """Called after pos has changed for threshold or a line was read."""
-
-    def read(self, size=None):
-        length = self.length
-        threshold = self.threshold
-        buffer = []
-
-        if size is None:
-            while self.pos < length:
-                step = min(threshold, length - self.pos)
-                data = self._stream.read(step)
-                self.pos += step
-                self.processed()
-                buffer.append(data)
-        else:
-            read = 0
-            while read < size:
-                step = min(threshold, length - self.pos)
-                step = min(step, size)
-                data = self._stream.read(step)
-                self.pos += step
-                read += step
-                self.processed()
-                buffer.append(data)
-
-        return ''.join(buffer)
-
-    def readline(self, *args):
-        line = self._stream.readline(*args)
-        self.pos += len(line)
-        self.processed()
-        return line
-
-    def readlines(self, hint=None):
-        result = []
-        while self.pos < self.length:
-            result.append(self.readline())
-        return result
--- a/MoinMoin/support/werkzeug/contrib/securecookie.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/securecookie.py	Sun Dec 25 16:43:04 2011 +0100
@@ -39,11 +39,12 @@
     If you are using the werkzeug request objects you could integrate the
     secure cookie into your application like this::
 
-        from werkzeug import BaseRequest, cached_property
+        from werkzeug.utils import cached_property
+        from werkzeug.wrappers import BaseRequest
         from werkzeug.contrib.securecookie import SecureCookie
 
         # don't use this key but a different one; you could just use
-        # os.unrandom(20) to get something random
+        # os.urandom(20) to get something random
         SECRET_KEY = '\xfa\xdd\xb8z\xae\xe0}4\x8b\xea'
 
         class Request(BaseRequest):
@@ -84,17 +85,17 @@
             request.client_session.save_cookie(response)
             return response(environ, start_response)
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 import sys
 import cPickle as pickle
 from hmac import new as hmac
-from datetime import datetime
-from time import time, mktime, gmtime
-from werkzeug import url_quote_plus, url_unquote_plus
+from time import time
+from werkzeug.urls import url_quote_plus, url_unquote_plus
 from werkzeug._internal import _date_to_unix
 from werkzeug.contrib.sessions import ModificationTrackingDict
+from werkzeug.security import safe_str_cmp
 
 
 # rather ugly way to import the correct hash method.  Because
@@ -144,7 +145,10 @@
     #: The hash method to use.  This has to be a module with a new function
     #: or a function that creates a hashlib object.  Such as `hashlib.md5`
     #: Subclasses can override this attribute.  The default hash is sha1.
-    hash_method = _default_hash
+    #: Make sure to wrap this in staticmethod() if you store an arbitrary
+    #: function there such as hashlib.sha1 which might be implemented
+    #: as a function.
+    hash_method = staticmethod(_default_hash)
 
     #: the module used for serialization.  Unless overriden by subclasses
     #: the standard pickle module is used.
@@ -170,12 +174,12 @@
             self.should_save and '*' or ''
         )
 
+    @property
     def should_save(self):
         """True if the session should be saved.  By default this is only true
         for :attr:`modified` cookies, not :attr:`new`.
         """
         return self.modified
-    should_save = property(should_save, doc=should_save.__doc__)
 
     @classmethod
     def quote(cls, value):
@@ -203,7 +207,7 @@
             if cls.serialization_method is not None:
                 value = cls.serialization_method.loads(value)
             return value
-        except:
+        except Exception:
             # unfortunately pickle and other serialization modules can
             # cause pretty every error here.  if we get one we catch it
             # and convert it into an UnquoteError
@@ -245,7 +249,7 @@
         :return: a new :class:`SecureCookie`.
         """
         if isinstance(string, unicode):
-            string = string.encode('utf-8', 'ignore')
+            string = string.encode('utf-8', 'replace')
         try:
             base64_hash, data = string.split('?', 1)
         except (ValueError, IndexError):
@@ -273,7 +277,7 @@
                 client_hash = base64_hash.decode('base64')
             except Exception:
                 items = client_hash = None
-            if items is not None and client_hash == mac.digest():
+            if items is not None and safe_str_cmp(client_hash, mac.digest()):
                 try:
                     for key, value in items.iteritems():
                         items[key] = cls.unquote(value)
@@ -304,8 +308,8 @@
         """
         data = request.cookies.get(key)
         if not data:
-            return SecureCookie(secret_key=secret_key)
-        return SecureCookie.unserialize(data, secret_key)
+            return cls(secret_key=secret_key)
+        return cls.unserialize(data, secret_key)
 
     def save_cookie(self, response, key='session', expires=None,
                     session_expires=None, max_age=None, path='/', domain=None,
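
A minimal round-trip sketch for SecureCookie; the digest comparison now goes through `safe_str_cmp`, which guards against timing attacks (the secret key below is a placeholder, derive a real one from `os.urandom`)::

    from werkzeug.contrib.securecookie import SecureCookie

    SECRET_KEY = 'example-secret-do-not-use'
    cookie = SecureCookie({'username': u'admin'}, secret_key=SECRET_KEY)
    blob = cookie.serialize()                 # signed, pickled payload

    restored = SecureCookie.unserialize(blob, SECRET_KEY)
    assert restored['username'] == u'admin'   # signature verified via safe_str_cmp
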
--- a/MoinMoin/support/werkzeug/contrib/sessions.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/sessions.py	Sun Dec 25 16:43:04 2011 +0100
@@ -37,7 +37,7 @@
 
         def application(environ, start_response):
             request = Request(environ)
-            sid = request.cookie.get('cookie_name')
+            sid = request.cookies.get('cookie_name')
             if sid is None:
                 request.session = session_store.new()
             else:
@@ -48,11 +48,13 @@
                 response.set_cookie('cookie_name', request.session.sid)
             return response(environ, start_response)
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 import re
 import os
+import sys
+import tempfile
 from os import path
 from time import time
 from random import random
@@ -62,11 +64,13 @@
     from sha import new as sha1
 from cPickle import dump, load, HIGHEST_PROTOCOL
 
-from werkzeug.utils import ClosingIterator, dump_cookie, parse_cookie
 from werkzeug.datastructures import CallbackDict
+from werkzeug.utils import dump_cookie, parse_cookie
+from werkzeug.wsgi import ClosingIterator
+from werkzeug.posixemulation import rename
 
 
-_sha1_re = re.compile(r'^[a-fA-F0-9]{40}$')
+_sha1_re = re.compile(r'^[a-f0-9]{40}$')
 
 
 def _urandom():
@@ -124,8 +128,13 @@
 
     @property
     def should_save(self):
-        """True if the session should be saved."""
-        return self.modified or self.new
+        """True if the session should be saved.
+
+        .. versionchanged:: 0.6
+           By default the session is now only saved if the session is
+           modified, not if it is new like it was before.
+        """
+        return self.modified
 
 
 class SessionStore(object):
@@ -173,9 +182,19 @@
         return self.session_class({}, sid, True)
 
 
+#: used for temporary files by the filesystem session store
+_fs_transaction_suffix = '.__wz_sess'
+
+
 class FilesystemSessionStore(SessionStore):
-    """Simple example session store that saves sessions in the filesystem like
-    PHP does.
+    """Simple example session store that saves sessions on the filesystem.
+    This store works best on POSIX systems and Windows Vista / Windows
+    Server 2008 and newer.
+
+    .. versionchanged:: 0.6
+       `renew_missing` was added.  Previously this was considered `True`,
+       now the default changed to `False` and it can be explicitly
+       deactivated.
 
     :param path: the path to the folder used for storing the sessions.
                  If not provided the default temporary directory is used.
@@ -184,48 +203,93 @@
                               session id.
     :param session_class: The session class to use.  Defaults to
                           :class:`Session`.
+    :param renew_missing: set to `True` if you want the store to
+                          give the user a new sid if the session was
+                          not yet saved.
     """
 
     def __init__(self, path=None, filename_template='werkzeug_%s.sess',
-                 session_class=None):
+                 session_class=None, renew_missing=False, mode=0644):
         SessionStore.__init__(self, session_class)
         if path is None:
-            from tempfile import gettempdir
-            path = gettempdir()
+            path = tempfile.gettempdir()
         self.path = path
+        if isinstance(filename_template, unicode):
+            filename_template = filename_template.encode(
+                sys.getfilesystemencoding() or 'utf-8')
+        assert not filename_template.endswith(_fs_transaction_suffix), \
+            'filename templates may not end with %s' % _fs_transaction_suffix
         self.filename_template = filename_template
+        self.renew_missing = renew_missing
+        self.mode = mode
 
     def get_session_filename(self, sid):
+        # out of the box, this should be a strict ASCII subset but
+        # you might reconfigure the session object to have a more
+        # arbitrary string.
+        if isinstance(sid, unicode):
+            sid = sid.encode(sys.getfilesystemencoding() or 'utf-8')
         return path.join(self.path, self.filename_template % sid)
 
     def save(self, session):
-        f = file(self.get_session_filename(session.sid), 'wb')
+        fn = self.get_session_filename(session.sid)
+        fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix,
+                                   dir=self.path)
+        f = os.fdopen(fd, 'wb')
         try:
             dump(dict(session), f, HIGHEST_PROTOCOL)
         finally:
             f.close()
+        try:
+            rename(tmp, fn)
+            os.chmod(fn, self.mode)
+        except (IOError, OSError):
+            pass
 
     def delete(self, session):
         fn = self.get_session_filename(session.sid)
         try:
-            # Late import because Google Appengine won't allow os.unlink
-            from os import unlink
-            unlink(fn)
+            os.unlink(fn)
         except OSError:
             pass
 
     def get(self, sid):
-        fn = self.get_session_filename(sid)
-        if not self.is_valid_key(sid) or not path.exists(fn):
+        if not self.is_valid_key(sid):
             return self.new()
+        try:
+            f = open(self.get_session_filename(sid), 'rb')
+        except IOError:
+            if self.renew_missing:
+                return self.new()
+            data = {}
         else:
-            f = file(fn, 'rb')
             try:
-                data = load(f)
+                try:
+                    data = load(f)
+                except Exception:
+                    data = {}
             finally:
                 f.close()
         return self.session_class(data, sid, False)
 
+    def list(self):
+        """Lists all sessions in the store.
+
+        .. versionadded:: 0.6
+        """
+        before, after = self.filename_template.split('%s', 1)
+        filename_re = re.compile(r'%s(.{5,})%s$' % (re.escape(before),
+                                                    re.escape(after)))
+        result = []
+        for filename in os.listdir(self.path):
+            #: this is a session that is still being saved.
+            if filename.endswith(_fs_transaction_suffix):
+                continue
+            match = filename_re.match(filename)
+            if match is not None:
+                result.append(match.group(1))
+        return result
+
 
 class SessionMiddleware(object):
     """A simple middleware that puts the session object of a store provided
@@ -237,7 +301,7 @@
     the WSGI environment only relevant for the application which is against
     the concept of WSGI.
 
-    The cookie parameters are the same as for the :func:`~werkzeug.dump_cookie`
+    The cookie parameters are the same as for the :func:`~dump_cookie`
     function just prefixed with ``cookie_``.  Additionally `max_age` is
     called `cookie_age` and not `cookie_max_age` because of backwards
     compatibility.
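
A sketch of the FilesystemSessionStore round trip with the behaviour introduced above: saves go through mkstemp plus an atomic rename, and `list()` skips in-flight transaction files::

    from werkzeug.contrib.sessions import FilesystemSessionStore

    store = FilesystemSessionStore()          # defaults to tempfile.gettempdir()
    session = store.new()
    session['visits'] = 1
    store.save(session)                       # temp file + rename, then chmod

    reloaded = store.get(session.sid)
    assert reloaded['visits'] == 1
    assert session.sid in store.list()        # list() was added in 0.6
    store.delete(session)
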
--- a/MoinMoin/support/werkzeug/contrib/testtools.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/testtools.py	Sun Dec 25 16:43:04 2011 +0100
@@ -9,10 +9,15 @@
         A response wrapper which adds various cached attributes for
         simplified assertions on various content types.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
-from werkzeug import Response, cached_property, import_string
+from werkzeug.utils import cached_property, import_string
+from werkzeug.wrappers import Response
+
+from warnings import warn
+warn(DeprecationWarning('werkzeug.contrib.testtools is deprecated and '
+                        'will be removed with Werkzeug 1.0'))
 
 
 class ContentAccessors(object):
@@ -56,7 +61,7 @@
             raise AttributeError('Not a JSON response')
         try:
             from simplejson import loads
-        except:
+        except ImportError:
             from json import loads
         return loads(self.data)
     json = cached_property(json)
--- a/MoinMoin/support/werkzeug/contrib/wrappers.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/contrib/wrappers.py	Sun Dec 25 16:43:04 2011 +0100
@@ -8,7 +8,7 @@
 
     Example::
 
-        from werkzeug import Request as RequestBase
+        from werkzeug.wrappers import Request as RequestBase
         from werkzeug.contrib.wrappers import JSONRequestMixin
 
         class Request(RequestBase, JSONRequestMixin):
@@ -17,11 +17,13 @@
     Afterwards this request object provides the extra functionality of the
     :class:`JSONRequestMixin`.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
+import codecs
 from werkzeug.exceptions import BadRequest
 from werkzeug.utils import cached_property
+from werkzeug.http import dump_options_header, parse_options_header
 from werkzeug._internal import _decode_unicode
 try:
     from simplejson import loads
@@ -29,6 +31,15 @@
     from json import loads
 
 
+def is_known_charset(charset):
+    """Checks if the given charset is known to Python."""
+    try:
+        codecs.lookup(charset)
+    except LookupError:
+        return False
+    return True
+
+
 class JSONRequestMixin(object):
     """Add json method to a request object.  This will parse the input data
     through simplejson if possible.
@@ -160,3 +171,105 @@
         """The root path of the script includling a trailing slash."""
         path = (self.environ.get('SCRIPT_NAME') or '').rstrip('/') + '/'
         return _decode_unicode(path, self.charset, self.encoding_errors)
+
+
+class DynamicCharsetRequestMixin(object):
+    """"If this mixin is mixed into a request class it will provide
+    a dynamic `charset` attribute.  This means that if the charset is
+    transmitted in the content type headers it's used from there.
+
+    Because it changes the behavior of :class:`Request` this class has
+    to be mixed in *before* the actual request class::
+
+        class MyRequest(DynamicCharsetRequestMixin, Request):
+            pass
+
+    By default the request object assumes that the URL charset is the
+    same as the data charset.  If the charset varies on each request
+    based on the transmitted data it's not a good idea to let the URLs
+    change based on that.  Most browsers assume either utf-8 or latin1
+    for the URLs if they have trouble figuring it out.  It's strongly
+    recommended to set the URL charset to utf-8::
+
+        class MyRequest(DynamicCharsetRequestMixin, Request):
+            url_charset = 'utf-8'
+
+    .. versionadded:: 0.6
+    """
+
+    #: the default charset that is assumed if the content type header
+    #: is missing or does not contain a charset parameter.  The default
+    #: is latin1 which is what HTTP specifies as default charset.
+    #: You may however want to set this to utf-8 to better support
+    #: browsers that do not transmit a charset for incoming data.
+    default_charset = 'latin1'
+
+    def unknown_charset(self, charset):
+        """Called if a charset was provided but is not supported by
+        the Python codecs module.  By default latin1 is assumed then
+        to not lose any information, you may override this method to
+        change the behavior.
+
+        :param charset: the charset that was not found.
+        :return: the replacement charset.
+        """
+        return 'latin1'
+
+    @cached_property
+    def charset(self):
+        """The charset from the content type."""
+        header = self.environ.get('CONTENT_TYPE')
+        if header:
+            ct, options = parse_options_header(header)
+            charset = options.get('charset')
+            if charset:
+                if is_known_charset(charset):
+                    return charset
+                return self.unknown_charset(charset)
+        return self.default_charset
+
+
+class DynamicCharsetResponseMixin(object):
+    """If this mixin is mixed into a response class it will provide
+    a dynamic `charset` attribute.  This means that if the charset is
+    looked up and stored in the `Content-Type` header and updates
+    itself automatically.  This also means a small performance hit but
+    can be useful if you're working with different charsets on
+    responses.
+
+    Because the charset attribute is not a property at class-level, the
+    default value is stored in `default_charset`.
+
+    Because it changes the behavior of :class:`Response` this class has
+    to be mixed in *before* the actual response class::
+
+        class MyResponse(DynamicCharsetResponseMixin, Response):
+            pass
+
+    .. versionadded:: 0.6
+    """
+
+    #: the default charset.
+    default_charset = 'utf-8'
+
+    def _get_charset(self):
+        header = self.headers.get('content-type')
+        if header:
+            charset = parse_options_header(header)[1].get('charset')
+            if charset:
+                return charset
+        return self.default_charset
+
+    def _set_charset(self, charset):
+        header = self.headers.get('content-type')
+        ct, options = parse_options_header(header)
+        if not ct:
+            raise TypeError('Cannot set charset if Content-Type '
+                            'header is missing.')
+        options['charset'] = charset
+        self.headers['Content-Type'] = dump_options_header(ct, options)
+
+    charset = property(_get_charset, _set_charset, doc="""
+        The charset for the response.  It's stored inside the
+        Content-Type header as a parameter.""")
+    del _get_charset, _set_charset
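
A hedged sketch of mixing the dynamic charset behaviour into a request class; the charset parameter of the Content-Type header (when Python knows it) is used for decoding the body, while URL decoding stays pinned to utf-8::

    from werkzeug.wrappers import Request
    from werkzeug.contrib.wrappers import DynamicCharsetRequestMixin

    class MyRequest(DynamicCharsetRequestMixin, Request):
        url_charset = 'utf-8'

    req = MyRequest.from_values(
        data='name=J%FCrgen',
        content_type='application/x-www-form-urlencoded; charset=latin1',
        method='POST')
    assert req.charset == 'latin1'            # picked up from Content-Type
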
--- a/MoinMoin/support/werkzeug/datastructures.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/datastructures.py	Sun Dec 25 16:43:04 2011 +0100
@@ -5,13 +5,15 @@
 
     This module provides mixins and classes with an immutable interface.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 import re
 import codecs
 import mimetypes
-from werkzeug._internal import _proxy_repr, _missing
+from itertools import repeat
+
+from werkzeug._internal import _proxy_repr, _missing, _empty_stream
 
 
 _locale_delim_re = re.compile(r'[_-]')
@@ -21,6 +23,25 @@
     raise TypeError('%r objects are immutable' % self.__class__.__name__)
 
 
+def iter_multi_items(mapping):
+    """Iterates over the items of a mapping yielding keys and values
+    without dropping any from more complex structures.
+    """
+    if isinstance(mapping, MultiDict):
+        for item in mapping.iteritems(multi=True):
+            yield item
+    elif isinstance(mapping, dict):
+        for key, value in mapping.iteritems():
+            if isinstance(value, (tuple, list)):
+                for value in value:
+                    yield key, value
+            else:
+                yield key, value
+    else:
+        for item in mapping:
+            yield item
+
+
 class ImmutableListMixin(object):
     """Makes a :class:`list` immutable.
 
@@ -29,6 +50,17 @@
     :private:
     """
 
+    _hash_cache = None
+
+    def __hash__(self):
+        if self._hash_cache is not None:
+            return self._hash_cache
+        rv = self._hash_cache = hash(tuple(self))
+        return rv
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (list(self),)
+
     def __delitem__(self, key):
         is_immutable(self)
 
@@ -83,6 +115,25 @@
 
     :private:
     """
+    _hash_cache = None
+
+    @classmethod
+    def fromkeys(cls, keys, value=None):
+        instance = super(cls, cls).__new__(cls)
+        instance.__init__(zip(keys, repeat(value)))
+        return instance
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (dict(self),)
+
+    def _iter_hashitems(self):
+        return self.iteritems()
+
+    def __hash__(self):
+        if self._hash_cache is not None:
+            return self._hash_cache
+        rv = self._hash_cache = hash(frozenset(self._iter_hashitems()))
+        return rv
 
     def setdefault(self, key, default=None):
         is_immutable(self)
@@ -114,6 +165,15 @@
     :private:
     """
 
+    def __reduce_ex__(self, protocol):
+        return type(self), (self.items(multi=True),)
+
+    def _iter_hashitems(self):
+        return self.iteritems(multi=True)
+
+    def add(self, key, value):
+        is_immutable(self)
+
     def popitemlist(self):
         is_immutable(self)
 
@@ -128,7 +188,12 @@
 
 
 class UpdateDictMixin(object):
-    """Makes dicts call `self.on_update` on modifications."""
+    """Makes dicts call `self.on_update` on modifications.
+
+    .. versionadded:: 0.5
+
+    :private:
+    """
 
     on_update = None
 
@@ -198,7 +263,7 @@
 
     def copy(self):
         """Return a shallow mutable copy of this object.  Keep in mind that
-        the standard library's :func:`copy` funciton is a no-op for this class
+        the standard library's :func:`copy` function is a no-op for this class
         like for any other python immutable type (eg: :class:`tuple`).
         """
         return TypeConversionDict(self)
@@ -236,7 +301,7 @@
 
     From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
     subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
-    render a page for a ``400 BAD REQUEST`` if catched in a catch-all for HTTP
+    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
     exceptions.
 
     A :class:`MultiDict` can be constructed from an iterable of
@@ -248,11 +313,6 @@
                     or `None`.
     """
 
-    # the key error this class raises.  Because of circular dependencies
-    # with the http exception module this class is created at the end of
-    # this module.
-    KeyError = None
-
     def __init__(self, mapping=None):
         if isinstance(mapping, MultiDict):
             dict.__init__(self, ((k, l[:]) for k, l in mapping.iterlists()))
@@ -271,6 +331,16 @@
                 tmp.setdefault(key, []).append(value)
             dict.__init__(self, tmp)
 
+    def __getstate__(self):
+        return dict(self.lists())
+
+    def __setstate__(self, value):
+        dict.clear(self)
+        dict.update(self, value)
+
+    def __iter__(self):
+        return self.iterkeys()
+
     def __getitem__(self, key):
         """Return the first data value for this key;
         raises KeyError if not found.
@@ -280,12 +350,26 @@
         """
         if key in self:
             return dict.__getitem__(self, key)[0]
-        raise self.KeyError(key)
+        raise BadRequestKeyError(key)
 
     def __setitem__(self, key, value):
-        """Set an item as list."""
+        """Like :meth:`add` but removes an existing key first.
+
+        :param key: the key for the value.
+        :param value: the value to set.
+        """
         dict.__setitem__(self, key, [value])
 
+    def add(self, key, value):
+        """Adds a new value for the key.
+
+        .. versionadded:: 0.6
+
+        :param key: the key for the value.
+        :param value: the value to add.
+        """
+        dict.setdefault(self, key, []).append(value)
+
     def getlist(self, key, type=None):
         """Return the list of items for a given key. If that key is not in the
         `MultiDict`, the return value will be an empty list.  Just as `get`
@@ -372,7 +456,7 @@
         """Return a list of ``(key, value)`` pairs.
 
         :param multi: If set to `True` the list returned will have a
-                      pair for each value of each key.  Ohterwise it
+                      pair for each value of each key.  Otherwise it
                       will only contain pairs for the first value of
                       each key.
 
@@ -380,11 +464,13 @@
         """
         return list(self.iteritems(multi))
 
-    #: Return a list of ``(key, value)`` pairs, where values is the list of
-    #: all values associated with the key.
-    #:
-    #: :return: a :class:`list`
-    lists = dict.items
+    def lists(self):
+        """Return a list of ``(key, values)`` pairs, where values is the list of
+        all values associated with the key.
+
+        :return: a :class:`list`
+        """
+        return list(self.iterlists())
 
     def values(self):
         """Returns a list of the first value on every key's value list.
@@ -415,10 +501,7 @@
                 yield key, values[0]
 
     def iterlists(self):
-        """Return a list of all values associated with a key.
-
-        :return: a class:`list`
-        """
+        """Like :meth:`items` but returns an iterator."""
         for key, values in dict.iteritems(self):
             yield key, list(values)
 
@@ -428,9 +511,8 @@
             yield values[0]
 
     def iterlistvalues(self):
-        """like :meth:`listvalues` but returns an iterator."""
-        for values in dict.itervalues(self):
-            yield list(values)
+        """Like :meth:`listvalues` but returns an iterator."""
+        return dict.itervalues(self)
 
     def copy(self):
         """Return a shallow copy of this object."""
@@ -448,19 +530,12 @@
         """
         if flat:
             return dict(self.iteritems())
-        return dict(self)
+        return dict(self.lists())
 
     def update(self, other_dict):
         """update() extends rather than replaces existing key lists."""
-        if isinstance(other_dict, MultiDict):
-            for key, value_list in other_dict.iterlists():
-                self.setlistdefault(key, []).extend(value_list)
-        elif isinstance(other_dict, dict):
-            for key, value in other_dict.items():
-                self.setlistdefault(key, []).append(value)
-        else:
-            for key, value in other_dict:
-                self.setlistdefault(key, []).append(value)
+        for key, value in iter_multi_items(other_dict):
+            MultiDict.add(self, key, value)
 
     def pop(self, key, default=_missing):
         """Pop the first item for a list on the dict.  Afterwards the
@@ -476,12 +551,12 @@
         :param default: if provided the value to return if the key was
                         not in the dictionary.
         """
-        if default is not _missing:
-            return dict.pop(self, key, default)
         try:
             return dict.pop(self, key)[0]
         except KeyError, e:
-            raise self.KeyError(str(e))
+            if default is not _missing:
+                return default
+            raise BadRequestKeyError(str(e))
 
     def popitem(self):
         """Pop an item from the dict."""
@@ -489,7 +564,7 @@
             item = dict.popitem(self)
             return (item[0], item[1][0])
         except KeyError, e:
-            raise self.KeyError(str(e))
+            raise BadRequestKeyError(str(e))
 
     def poplist(self, key):
         """Pop the list for a key from the dict.  If the key is not in the dict
@@ -506,12 +581,224 @@
         try:
             return dict.popitem(self)
         except KeyError, e:
-            raise self.KeyError(str(e))
+            raise BadRequestKeyError(str(e))
+
+    def __copy__(self):
+        return self.copy()
 
     def __repr__(self):
         return '%s(%r)' % (self.__class__.__name__, self.items(multi=True))
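
A short sketch of the reworked :class:`MultiDict` API above (``add``,
``getlist`` and the now list-preserving ``to_dict(flat=False)``), assuming the
bundled module imports as ``werkzeug.datastructures``::

    from werkzeug.datastructures import MultiDict

    d = MultiDict([('a', 'b'), ('a', 'c')])
    assert d['a'] == 'b'                      # item access returns the first value
    d.add('a', 'd')                           # append another value for the key
    assert d.getlist('a') == ['b', 'c', 'd']
    assert d.to_dict() == {'a': 'b'}          # flat: first value per key
    assert d.to_dict(flat=False) == {'a': ['b', 'c', 'd']}
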
 
 
+class _omd_bucket(object):
+    """Wraps values in the :class:`OrderedMultiDict`.  This makes it
+    possible to keep an order over multiple different keys.  It requires
+    a lot of extra memory and slows down access a lot, but makes it
+    possible to access elements in O(1) and iterate in O(n).
+    """
+    __slots__ = ('prev', 'key', 'value', 'next')
+
+    def __init__(self, omd, key, value):
+        self.prev = omd._last_bucket
+        self.key = key
+        self.value = value
+        self.next = None
+
+        if omd._first_bucket is None:
+            omd._first_bucket = self
+        if omd._last_bucket is not None:
+            omd._last_bucket.next = self
+        omd._last_bucket = self
+
+    def unlink(self, omd):
+        if self.prev:
+            self.prev.next = self.next
+        if self.next:
+            self.next.prev = self.prev
+        if omd._first_bucket is self:
+            omd._first_bucket = self.next
+        if omd._last_bucket is self:
+            omd._last_bucket = self.prev
+
+
+class OrderedMultiDict(MultiDict):
+    """Works like a regular :class:`MultiDict` but preserves the
+    order of the fields.  To convert the ordered multi dict into a
+    list you can use the :meth:`items` method and pass it ``multi=True``.
+
+    In general an :class:`OrderedMultiDict` is an order of magnitude
+    slower than a :class:`MultiDict`.
+
+    .. admonition:: note
+
+       Due to a limitation in Python you cannot convert an ordered
+       multi dict into a regular dict by using ``dict(multidict)``.
+       Instead you have to use the :meth:`to_dict` method, otherwise
+       the internal bucket objects are exposed.
+    """
+
+    def __init__(self, mapping=None):
+        dict.__init__(self)
+        self._first_bucket = self._last_bucket = None
+        if mapping is not None:
+            OrderedMultiDict.update(self, mapping)
+
+    def __eq__(self, other):
+        if not isinstance(other, MultiDict):
+            return NotImplemented
+        if isinstance(other, OrderedMultiDict):
+            iter1 = self.iteritems(multi=True)
+            iter2 = other.iteritems(multi=True)
+            try:
+                for k1, v1 in iter1:
+                    k2, v2 = iter2.next()
+                    if k1 != k2 or v1 != v2:
+                        return False
+            except StopIteration:
+                return False
+            try:
+                iter2.next()
+            except StopIteration:
+                return True
+            return False
+        if len(self) != len(other):
+            return False
+        for key, values in self.iterlists():
+            if other.getlist(key) != values:
+                return False
+        return True
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (self.items(multi=True),)
+
+    def __getstate__(self):
+        return self.items(multi=True)
+
+    def __setstate__(self, values):
+        dict.clear(self)
+        for key, value in values:
+            self.add(key, value)
+
+    def __getitem__(self, key):
+        if key in self:
+            return dict.__getitem__(self, key)[0].value
+        raise BadRequestKeyError(key)
+
+    def __setitem__(self, key, value):
+        self.poplist(key)
+        self.add(key, value)
+
+    def __delitem__(self, key):
+        self.pop(key)
+
+    def iterkeys(self):
+        return (key for key, value in self.iteritems())
+
+    def itervalues(self):
+        return (value for key, value in self.iteritems())
+
+    def iteritems(self, multi=False):
+        ptr = self._first_bucket
+        if multi:
+            while ptr is not None:
+                yield ptr.key, ptr.value
+                ptr = ptr.next
+        else:
+            returned_keys = set()
+            while ptr is not None:
+                if ptr.key not in returned_keys:
+                    returned_keys.add(ptr.key)
+                    yield ptr.key, ptr.value
+                ptr = ptr.next
+
+    def iterlists(self):
+        returned_keys = set()
+        ptr = self._first_bucket
+        while ptr is not None:
+            if ptr.key not in returned_keys:
+                yield ptr.key, self.getlist(ptr.key)
+                returned_keys.add(ptr.key)
+            ptr = ptr.next
+
+    def iterlistvalues(self):
+        for key, values in self.iterlists():
+            yield values
+
+    def add(self, key, value):
+        dict.setdefault(self, key, []).append(_omd_bucket(self, key, value))
+
+    def getlist(self, key, type=None):
+        try:
+            rv = dict.__getitem__(self, key)
+        except KeyError:
+            return []
+        if type is None:
+            return [x.value for x in rv]
+        result = []
+        for item in rv:
+            try:
+                result.append(type(item.value))
+            except ValueError:
+                pass
+        return result
+
+    def setlist(self, key, new_list):
+        self.poplist(key)
+        for value in new_list:
+            self.add(key, value)
+
+    def setlistdefault(self, key, default_list=None):
+        raise TypeError('setlistdefault is unsupported for '
+                        'ordered multi dicts')
+
+    def update(self, mapping):
+        for key, value in iter_multi_items(mapping):
+            OrderedMultiDict.add(self, key, value)
+
+    def poplist(self, key):
+        buckets = dict.pop(self, key, ())
+        for bucket in buckets:
+            bucket.unlink(self)
+        return [x.value for x in buckets]
+
+    def pop(self, key, default=_missing):
+        try:
+            buckets = dict.pop(self, key)
+        except KeyError, e:
+            if default is not _missing:
+                return default
+            raise BadRequestKeyError(str(e))
+        for bucket in buckets:
+            bucket.unlink(self)
+        return buckets[0].value
+
+    def popitem(self):
+        try:
+            key, buckets = dict.popitem(self)
+        except KeyError, e:
+            raise BadRequestKeyError(str(e))
+        for bucket in buckets:
+            bucket.unlink(self)
+        return key, buckets[0].value
+
+    def popitemlist(self):
+        try:
+            key, buckets = dict.popitem(self)
+        except KeyError, e:
+            raise BadRequestKeyError(str(e))
+        for bucket in buckets:
+            bucket.unlink(self)
+        return key, [x.value for x in buckets]
+
+
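
A sketch of the ordering behaviour of the new class, including the ``to_dict``
caveat from the note above::

    from werkzeug.datastructures import OrderedMultiDict

    d = OrderedMultiDict()
    d.add('foo', 'bar')
    d.add('spam', 'eggs')
    d.add('foo', 'baz')
    # insertion order is kept even across different keys
    assert d.items(multi=True) == [('foo', 'bar'), ('spam', 'eggs'), ('foo', 'baz')]
    # dict(d) would expose the internal bucket objects; use to_dict() instead
    assert d.to_dict() == {'foo': 'bar', 'spam': 'eggs'}
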
+def _options_header_vkw(value, kw):
+    return dump_options_header(value, dict((k.replace('_', '-'), v)
+                                            for k, v in kw.items()))
+
+
 class Headers(object):
     """An object that stores some headers.  It has a dict-like interface
     but is ordered and can store the same keys multiple times.
@@ -521,7 +808,7 @@
 
     From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is
     also a subclass of the :class:`~exceptions.BadRequest` HTTP exception
-    and will render a page for a ``400 BAD REQUEST`` if catched in a
+    and will render a page for a ``400 BAD REQUEST`` if caught in a
     catch-all for HTTP exceptions.
 
     Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers`
@@ -538,17 +825,15 @@
     :param defaults: The list of default values for the :class:`Headers`.
     """
 
-    # the key error this class raises.  Because of circular dependencies
-    # with the http exception module this class is created at the end of
-    # this module.
-    KeyError = None
-
     def __init__(self, defaults=None, _list=None):
         if _list is None:
             _list = []
         self._list = _list
         if defaults is not None:
-            self.extend(defaults)
+            if isinstance(defaults, (list, Headers)):
+                self._list.extend(defaults)
+            else:
+                self.extend(defaults)
 
     @classmethod
     def linked(cls, headerlist):
@@ -557,7 +842,7 @@
 
         >>> headerlist = [('Content-Length', '40')]
         >>> headers = Headers.linked(headerlist)
-        >>> headers.add('Content-Type', 'text/html')
+        >>> headers['Content-Type'] = 'text/html'
         >>> headerlist
         [('Content-Length', '40'), ('Content-Type', 'text/html')]
 
@@ -566,8 +851,8 @@
         """
         return cls(_list=headerlist)
 
-    def __getitem__(self, key, _index_operation=True):
-        if _index_operation:
+    def __getitem__(self, key, _get_mode=False):
+        if not _get_mode:
             if isinstance(key, (int, long)):
                 return self._list[key]
             elif isinstance(key, slice):
@@ -576,7 +861,12 @@
         for k, v in self._list:
             if k.lower() == ikey:
                 return v
-        raise self.KeyError(key)
+        # micro optimization: if we are in get mode we will catch that
+        # exception one stack level down so we can raise a standard
+        # key error instead of our special one.
+        if _get_mode:
+            raise KeyError()
+        raise BadRequestKeyError(key)
 
     def __eq__(self, other):
         return other.__class__ is self.__class__ and \
@@ -608,7 +898,7 @@
                      by this callable the default value is returned.
         """
         try:
-            rv = self.__getitem__(key, _index_operation=False)
+            rv = self.__getitem__(key, _get_mode=True)
         except KeyError:
             return default
         if type is None:
@@ -735,7 +1025,7 @@
     def __contains__(self, key):
         """Check if a key is present."""
         try:
-            self.__getitem__(key, _index_operation=False)
+            self.__getitem__(key, _get_mode=True)
         except KeyError:
             return False
         return True
@@ -766,10 +1056,15 @@
             keyword arguments were added for :mod:`wsgiref` compatibility.
         """
         if kw:
-            _value = dump_options_header(_value, dict((k.replace('_', '-'), v)
-                                                      for k, v in kw.items()))
+            _value = _options_header_vkw(_value, kw)
+        self._validate_value(_value)
         self._list.append((_key, _value))
 
+    def _validate_value(self, value):
+        if isinstance(value, basestring) and ('\n' in value or '\r' in value):
+            raise ValueError('Detected newline in header value.  This is '
+                'a potential security problem')
+
     def add_header(self, _key, _value, **_kw):
         """Add a new header tuple to the list.
 
@@ -782,24 +1077,38 @@
         """Clears all headers."""
         del self._list[:]
 
-    def set(self, key, value):
+    def set(self, _key, _value, **kw):
         """Remove all header tuples for `key` and add a new one.  The newly
         added key either appears at the end of the list if there was no
         entry or replaces the first one.
 
+        Keyword arguments can specify additional parameters for the header
+        value, with underscores converted to dashes.  See :meth:`add` for
+        more information.
+
+        .. versionchanged:: 0.6.1
+           :meth:`set` now accepts the same arguments as :meth:`add`.
+
         :param key: The key to be inserted.
         :param value: The value to be inserted.
         """
-        lc_key = key.lower()
-        for idx, (old_key, old_value) in enumerate(self._list):
-            if old_key.lower() == lc_key:
+        if kw:
+            _value = _options_header_vkw(_value, kw)
+        self._validate_value(_value)
+        if not self._list:
+            self._list.append((_key, _value))
+            return
+        listiter = iter(self._list)
+        ikey = _key.lower()
+        for idx, (old_key, old_value) in enumerate(listiter):
+            if old_key.lower() == ikey:
                 # replace first occurrence
-                self._list[idx] = (key, value)
+                self._list[idx] = (_key, _value)
                 break
         else:
-            return self.add(key, value)
-        self._list[idx + 1:] = [(k, v) for k, v in self._list[idx + 1:]
-                                if k.lower() != lc_key]
+            self._list.append((_key, _value))
+            return
+        self._list[idx + 1:] = [t for t in listiter if t[0].lower() != ikey]
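
A sketch of the extended ``set``/``add`` signature and the new value
validation, assuming the bundled module imports as
``werkzeug.datastructures``::

    from werkzeug.datastructures import Headers

    headers = Headers()
    # keyword parameters are appended to the value; underscores in their
    # names become dashes
    headers.set('Content-Type', 'text/html', charset='utf-8')
    assert headers['Content-Type'] == 'text/html; charset=utf-8'

    # embedded newlines are rejected to prevent header injection
    try:
        headers.add('X-Key', 'value\r\nX-Injected: 1')
    except ValueError:
        pass
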
 
     def setdefault(self, key, value):
         """Returns the value for the key if it is in the dict, otherwise it
@@ -817,24 +1126,19 @@
     def __setitem__(self, key, value):
         """Like :meth:`set` but also supports index/slice based setting."""
         if isinstance(key, (slice, int, long)):
+            self._validate_value(value)
             self._list[key] = value
         else:
             self.set(key, value)
 
-    def to_list(self, charset='utf-8'):
+    def to_list(self, charset='iso-8859-1'):
         """Convert the headers into a list and converts the unicode header
         items to the specified charset.
 
         :return: list
         """
-        result = []
-        for k, v in self:
-            if isinstance(v, unicode):
-                v = v.encode(charset)
-            else:
-                v = str(v)
-            result.append((k, v))
-        return result
+        return [(k, isinstance(v, unicode) and v.encode(charset) or str(v))
+                for k, v in self]
 
     def copy(self):
         return self.__class__(self._list)
@@ -842,7 +1146,7 @@
     def __copy__(self):
         return self.copy()
 
-    def __str__(self, charset='utf-8'):
+    def __str__(self, charset='iso-8859-1'):
         """Returns formatted headers suitable for HTTP transmission."""
         strs = []
         for key, value in self.to_list(charset):
@@ -858,9 +1162,13 @@
 
 
 class ImmutableHeadersMixin(object):
-    """Makes a :class:`Headers` immutable.
+    """Makes a :class:`Headers` immutable.  We do not mark them as
+    hashable though since the only usecase for this datastructure
+    in Werkzeug is a view on a mutable structure.
 
     .. versionadded:: 0.5
+
+    :private:
     """
 
     def __delitem__(self, key):
@@ -897,7 +1205,7 @@
 
     From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
     subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
-    render a page for a ``400 BAD REQUEST`` if catched in a catch-all for
+    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for
     HTTP exceptions.
     """
 
@@ -910,19 +1218,25 @@
                         'no separate initializer' % cls.__name__)
 
     def __eq__(self, other):
-        return self is other
-
-    def __getitem__(self, key, _index_operation=False):
-        # _index_operation is a no-op for this class as there is no index but
+        return self.environ is other.environ
+
+    def __getitem__(self, key, _get_mode=False):
+        # _get_mode is a no-op for this class as there is no index but
         # used because get() calls it.
         key = key.upper().replace('-', '_')
         if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
             return self.environ[key]
         return self.environ['HTTP_' + key]
 
+    def __len__(self):
+        # the iter is necessary because otherwise list calls our
+        # len which would call list again and so forth.
+        return len(list(iter(self)))
+
     def __iter__(self):
         for key, value in self.environ.iteritems():
-            if key.startswith('HTTP_'):
+            if key.startswith('HTTP_') and key not in \
+               ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
                 yield key[5:].replace('_', '-').title(), value
             elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
                 yield key.replace('_', '-').title(), value
@@ -936,7 +1250,7 @@
     instances as sequence and it will combine the return values of all wrapped
     dicts:
 
-    >>> from werkzeug import MultiDict, CombinedMultiDict
+    >>> from werkzeug.datastructures import CombinedMultiDict, MultiDict
     >>> post = MultiDict([('foo', 'bar')])
     >>> get = MultiDict([('blub', 'blah')])
     >>> combined = CombinedMultiDict([get, post])
@@ -950,10 +1264,13 @@
 
     From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
     subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
-    render a page for a ``400 BAD REQUEST`` if catched in a catch-all for HTTP
+    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
     exceptions.
     """
 
+    def __reduce_ex__(self, protocol):
+        return type(self), (self.dicts,)
+
     def __init__(self, dicts=None):
         self.dicts = dicts or []
 
@@ -966,7 +1283,7 @@
         for d in self.dicts:
             if key in d:
                 return d[key]
-        raise self.KeyError(key)
+        raise BadRequestKeyError(key)
 
     def get(self, key, default=None, type=None):
         for d in self.dicts:
@@ -1083,18 +1400,19 @@
         :param filename: an optional filename
         :param content_type: an optional content type
         """
-        from werkzeug.utils import FileStorage
         if isinstance(file, FileStorage):
-            self[name] = file
-            return
-        if isinstance(file, basestring):
-            if filename is None:
-                filename = file
-            file = open(file, 'rb')
-        if filename and content_type is None:
-            content_type = mimetypes.guess_type(filename)[0] or \
-                           'application/octet-stream'
-        self[name] = FileStorage(file, filename, name, content_type)
+            value = file
+        else:
+            if isinstance(file, basestring):
+                if filename is None:
+                    filename = file
+                file = open(file, 'rb')
+            if filename and content_type is None:
+                content_type = mimetypes.guess_type(filename)[0] or \
+                               'application/octet-stream'
+            value = FileStorage(file, filename, name, content_type)
+
+        self.add(name, value)
 
 
 class ImmutableDict(ImmutableDictMixin, dict):
@@ -1107,7 +1425,7 @@
 
     def copy(self):
         """Return a shallow mutable copy of this object.  Keep in mind that
-        the standard library's :func:`copy` funciton is a no-op for this class
+        the standard library's :func:`copy` function is a no-op for this class
         like for any other python immutable type (eg: :class:`tuple`).
         """
         return dict(self)
@@ -1124,7 +1442,7 @@
 
     def copy(self):
         """Return a shallow mutable copy of this object.  Keep in mind that
-        the standard library's :func:`copy` funciton is a no-op for this class
+        the standard library's :func:`copy` function is a no-op for this class
         like for any other python immutable type (eg: :class:`tuple`).
         """
         return MultiDict(self)
@@ -1133,6 +1451,26 @@
         return self
 
 
+class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict):
+    """An immutable :class:`OrderedMultiDict`.
+
+    .. versionadded:: 0.6
+    """
+
+    def _iter_hashitems(self):
+        return enumerate(self.iteritems(multi=True))
+
+    def copy(self):
+        """Return a shallow mutable copy of this object.  Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return OrderedMultiDict(self)
+
+    def __copy__(self):
+        return self
+
+
 class Accept(ImmutableList):
     """An :class:`Accept` object is just a list subclass for lists of
     ``(value, quality)`` tuples.  It is automatically sorted by quality.
@@ -1159,7 +1497,7 @@
     0
 
     .. versionchanged:: 0.5
-       :class:`Accept` objects are forzed immutable now.
+       :class:`Accept` objects are forced immutable now.
     """
 
     def __init__(self, values=()):
@@ -1186,12 +1524,21 @@
         returned quality is ``0``.
         """
         if isinstance(key, basestring):
-            for item, quality in self:
-                if self._value_matches(key, item):
-                    return quality
-            return 0
+            return self.quality(key)
         return list.__getitem__(self, key)
 
+    def quality(self, key):
+        """Returns the quality of the key.
+
+        .. versionadded:: 0.6
+           In previous versions you had to use the item-lookup syntax
+           (eg: ``obj[key]`` instead of ``obj.quality(key)``)
+        """
+        for item, quality in self:
+            if self._value_matches(key, item):
+                return quality
+        return 0
+
     def __contains__(self, value):
         for item, quality in self:
             if self._value_matches(value, item):
@@ -1251,6 +1598,25 @@
     def __str__(self):
         return self.to_header()
 
+    def best_match(self, matches, default=None):
+        """Returns the best match from a list of possible matches based
+        on the quality of the client.  If two items have the same quality,
+        the one that comes first is returned.
+
+        :param matches: a list of matches to check for
+        :param default: the value that is returned if none match
+        """
+        best_quality = -1
+        result = default
+        for server_item in matches:
+            for client_item, quality in self:
+                if quality <= best_quality:
+                    break
+                if self._value_matches(server_item, client_item):
+                    best_quality = quality
+                    result = server_item
+        return result
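
A sketch of ``best_match`` and the new ``quality`` method, using a
:class:`MIMEAccept` constructed directly instead of parsed from a header::

    from werkzeug.datastructures import MIMEAccept

    accept = MIMEAccept([('text/html', 1), ('application/json', 0.5)])
    assert accept.quality('application/json') == 0.5
    # picks the offered type the client rates highest
    assert accept.best_match(['application/json', 'text/plain']) == 'application/json'
    # falls back to the default when nothing matches
    assert accept.best_match(['image/png'], default='text/html') == 'text/html'
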
+
     @property
     def best(self):
         """The best match as value."""
@@ -1306,6 +1672,11 @@
             'application/xml' in self
         )
 
+    @property
+    def accept_json(self):
+        """True if this object accepts JSON."""
+        return 'application/json' in self
+
 
 class LanguageAccept(Accept):
     """Like :class:`Accept` but with normalization for languages."""
@@ -1350,15 +1721,9 @@
     to subclass it and add your own items have a look at the sourcecode for
     that class.
 
-    The following attributes are exposed:
-
-    `no_cache`, `no_store`, `max_age`, `max_stale`, `min_fresh`,
-    `no_transform`, `only_if_cached`, `public`, `private`, `must_revalidate`,
-    `proxy_revalidate`, and `s_maxage`
-
     .. versionchanged:: 0.4
 
-       setting `no_cache` or `private` to boolean `True` will set the implicit
+       Setting `no_cache` or `private` to boolean `True` will set the implicit
        none-value which is ``*``:
 
        >>> cc = ResponseCacheControl()
@@ -1370,6 +1735,9 @@
        >>> cc.no_cache = None
        >>> cc
        <ResponseCacheControl ''>
+
+       In versions before 0.5 the behavior documented here affected the now
+       no longer existing `CacheControl` class.
     """
 
     no_cache = cache_property('no-cache', '*', None)
@@ -1435,6 +1803,11 @@
     """A cache control for requests.  This is immutable and gives access
     to all the request-relevant cache control headers.
 
+    To get a header of the :class:`RequestCacheControl` object again you can
+    convert the object into a string or call the :meth:`to_header` method.  If
+    you plan to subclass it and add your own items have a look at the sourcecode
+    for that class.
+
     .. versionadded:: 0.5
        In previous versions a `CacheControl` class existed that was used
        both for request and response.
@@ -1451,6 +1824,11 @@
     this is mutable and gives access to response-relevant cache control
     headers.
 
+    To get a header of the :class:`ResponseCacheControl` object again you can
+    convert the object into a string or call the :meth:`to_header` method.  If
+    you plan to subclass it and add your own items have a look at the sourcecode
+    for that class.
+
     .. versionadded:: 0.5
        In previous versions a `CacheControl` class existed that was used
        both for request and response.
@@ -1463,20 +1841,6 @@
     s_maxage = cache_property('s-maxage', None, None)
 
 
-class CacheControl(ResponseCacheControl):
-    """Deprecated."""
-    max_stale = cache_property('max-stale', '*', int)
-    min_fresh = cache_property('min-fresh', '*', int)
-    no_transform = cache_property('no-transform', None, None)
-    only_if_cached = cache_property('only-if-cached', None, bool)
-
-    def __init__(self, values=(), on_update=None):
-        from warnings import warn
-        warn(DeprecationWarning('CacheControl is deprecated in favor of '
-                                'RequestCacheControl and ResponseCacheControl.'))
-        ResponseCacheControl.__init__(self, values, on_update)
-
-
 # attach cache_property to the _CacheControl as staticmethod
 # so that others can reuse it.
 _CacheControl.cache_property = staticmethod(cache_property)
@@ -1521,7 +1885,7 @@
         self.update((header,))
 
     def remove(self, header):
-        """Remove a layer from the set.  This raises an :exc:`KeyError` if the
+        """Remove a header from the set.  This raises an :exc:`KeyError` if the
         header is not in the set.
 
         .. versionchanged:: 0.5
@@ -1678,7 +2042,10 @@
         return self.is_weak(etag) or self.contains(etag)
 
     def contains(self, etag):
-        """Check if an etag is part of the set ignoring weak tags."""
+        """Check if an etag is part of the set ignoring weak tags.
+        It is also possible to use the ``in`` operator.
+
+        """
         if self.star_tag:
             return True
         return etag in self._strong
@@ -1686,7 +2053,7 @@
     def contains_raw(self, etag):
         """When passed a quoted tag it will check if this tag is part of the
         set.  If the tag is weak it is checked against weak and strong tags,
-        otherwise weak only."""
+        otherwise strong only."""
         etag, weak = unquote_etag(etag)
         if weak:
             return self.contains_weak(etag)
@@ -1727,6 +2094,165 @@
         return '<%s %r>' % (self.__class__.__name__, str(self))
 
 
+class IfRange(object):
+    """Very simple object that represents the `If-Range` header in parsed
+    form.  It will have either an etag or a date (or neither), but
+    never both.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, etag=None, date=None):
+        #: The etag parsed and unquoted.  Ranges always operate on strong
+        #: etags so the weakness information is not necessary.
+        self.etag = etag
+        #: The date in parsed format or `None`.
+        self.date = date
+
+    def to_header(self):
+        """Converts the object back into an HTTP header."""
+        if self.date is not None:
+            return http_date(self.date)
+        if self.etag is not None:
+            return quote_etag(self.etag)
+        return ''
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return '<%s %r>' % (self.__class__.__name__, str(self))
+
+
+class Range(object):
+    """Represents a range header.  All the methods are only supporting bytes
+    as unit.  It does store multiple ranges but :meth:`range_for_length` will
+    only work if only one range is provided.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, units, ranges):
+        #: The units of this range.  Usually "bytes".
+        self.units = units
+        #: A list of ``(begin, end)`` tuples for the range header provided.
+        #: The ranges are non-inclusive.
+        self.ranges = ranges
+
+    def range_for_length(self, length):
+        """If the range is for bytes, the length is not None and there is
+        exactly one range and it is satisfiable it returns a ``(start, stop)``
+        tuple, otherwise `None`.
+        """
+        if self.units != 'bytes' or length is None or len(self.ranges) != 1:
+            return None
+        start, end = self.ranges[0]
+        if end is None:
+            end = length
+            if start < 0:
+                start += length
+        if is_byte_range_valid(start, end, length):
+            return start, min(end, length)
+
+    def make_content_range(self, length):
+        """Creates a :class:`~werkzeug.datastructures.ContentRange` object
+        from the current range and given content length.
+        """
+        rng = self.range_for_length(length)
+        if rng is not None:
+            return ContentRange(self.units, rng[0], rng[1], length)
+
+    def to_header(self):
+        """Converts the object back into an HTTP header."""
+        ranges = []
+        for begin, end in self.ranges:
+            if end is None:
+                ranges.append(begin >= 0 and '%s-' % begin or str(begin))
+            else:
+                ranges.append('%s-%s' % (begin, end - 1))
+        return '%s=%s' % (self.units, ','.join(ranges))
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return '<%s %r>' % (self.__class__.__name__, str(self))
+
+
+class ContentRange(object):
+    """Represents the content range header.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, units, start, stop, length=None, on_update=None):
+        assert is_byte_range_valid(start, stop, length), \
+            'Bad range provided'
+        self.on_update = on_update
+        self.set(start, stop, length, units)
+
+    def _callback_property(name):
+        def fget(self):
+            return getattr(self, name)
+        def fset(self, value):
+            setattr(self, name, value)
+            if self.on_update is not None:
+                self.on_update(self)
+        return property(fget, fset)
+
+    #: The units to use, usually "bytes"
+    units = _callback_property('_units')
+    #: The start point of the range or `None`.
+    start = _callback_property('_start')
+    #: The stop point of the range (non-inclusive) or `None`.  Can only be
+    #: `None` if start is also `None`.
+    stop = _callback_property('_stop')
+    #: The length of the range or `None`.
+    length = _callback_property('_length')
+
+    def set(self, start, stop, length=None, units='bytes'):
+        """Simple method to update the ranges."""
+        assert is_byte_range_valid(start, stop, length), \
+            'Bad range provided'
+        self._units = units
+        self._start = start
+        self._stop = stop
+        self._length = length
+        if self.on_update is not None:
+            self.on_update(self)
+
+    def unset(self):
+        """Sets the units to `None` which indicates that the header should
+        no longer be used.
+        """
+        self.set(None, None, units=None)
+
+    def to_header(self):
+        if self.units is None:
+            return ''
+        if self.length is None:
+            length = '*'
+        else:
+            length = self.length
+        if self.start is None:
+            return '%s */%s' % (self.units, length)
+        return '%s %s-%s/%s' % (
+            self.units,
+            self.start,
+            self.stop - 1,
+            length
+        )
+
+    def __nonzero__(self):
+        return self.units is not None
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return '<%s %r>' % (self.__class__.__name__, str(self))
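
A sketch tying the two new classes together: an open-ended byte range
resolved against a known content length (exact results depend on the
``is_byte_range_valid`` helper imported at the bottom of this module)::

    from werkzeug.datastructures import Range

    rng = Range('bytes', [(500, None)])        # "everything from byte 500"
    assert rng.to_header() == 'bytes=500-'
    assert rng.range_for_length(1000) == (500, 1000)   # non-inclusive stop
    assert str(rng.make_content_range(1000)) == 'bytes 500-999/1000'
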
+
+
 class Authorization(ImmutableDictMixin, dict):
     """Represents an `Authorization` header sent by the client.  You should
     not create this kind of object yourself but use it when it's returned by
@@ -1847,13 +2373,13 @@
 
     def auth_property(name, doc=None):
         """A static helper function for subclasses to add extra authentication
-        system properites onto a class::
+        system properties onto a class::
 
             class FooAuthenticate(WWWAuthenticate):
                 special_realm = auth_property('special_realm')
 
         For more information have a look at the sourcecode to see how the
-        regular properties (:attr:`realm` etc. are implemented).
+        regular properties (:attr:`realm` etc.) are implemented.
         """
         def _set_value(self, value):
             if value is None:
@@ -1899,7 +2425,7 @@
         If the algorithm is not understood, the challenge should be ignored
         (and a different one used, if there is more than one).''')
     qop = _set_property('qop', doc='''
-        A set of quality-of-privacy modifies such as auth and auth-int.''')
+        A set of quality-of-privacy directives such as auth and auth-int.''')
 
     def _get_stale(self):
         val = self.get('stale')
@@ -1921,13 +2447,127 @@
     del _set_property
 
 
+class FileStorage(object):
+    """The :class:`FileStorage` class is a thin wrapper over incoming files.
+    It is used by the request object to represent uploaded files.  All the
+    attributes of the wrapper stream are proxied by the file storage so
+    it's possible to do ``storage.read()`` instead of the long form
+    ``storage.stream.read()``.
+    """
+
+    def __init__(self, stream=None, filename=None, name=None,
+                 content_type=None, content_length=None,
+                 headers=None):
+        self.name = name
+        self.stream = stream or _empty_stream
+
+        # if no filename is provided we can attempt to get the filename
+        # from the stream object passed.  There we have to be careful to
+        # skip things like <fdopen>, <stderr> etc.  Python marks these
+        # special filenames with angular brackets.
+        if filename is None:
+            filename = getattr(stream, 'name', None)
+            if filename and filename[0] == '<' and filename[-1] == '>':
+                filename = None
+
+        self.filename = filename
+        if headers is None:
+            headers = Headers()
+        self.headers = headers
+        if content_type is not None:
+            headers['Content-Type'] = content_type
+        if content_length is not None:
+            headers['Content-Length'] = str(content_length)
+
+    def _parse_content_type(self):
+        if not hasattr(self, '_parsed_content_type'):
+            self._parsed_content_type = \
+                parse_options_header(self.content_type)
+
+    @property
+    def content_type(self):
+        """The file's content type.  Usually not available"""
+        return self.headers.get('content-type')
+
+    @property
+    def content_length(self):
+        """The file's content length.  Usually not available"""
+        return int(self.headers.get('content-length') or 0)
+
+    @property
+    def mimetype(self):
+        """Like :attr:`content_type` but without parameters (eg, without
+        charset, type etc.).  For example if the content
+        type is ``text/html; charset=utf-8`` the mimetype would be
+        ``'text/html'``.
+
+        .. versionadded:: 0.7
+        """
+        self._parse_content_type()
+        return self._parsed_content_type[0]
+
+    @property
+    def mimetype_params(self):
+        """The mimetype parameters as dict.  For example if the content
+        type is ``text/html; charset=utf-8`` the params would be
+        ``{'charset': 'utf-8'}``.
+
+        .. versionadded:: 0.7
+        """
+        self._parse_content_type()
+        return self._parsed_content_type[1]
+
+    def save(self, dst, buffer_size=16384):
+        """Save the file to a destination path or file object.  If the
+        destination is a file object you have to close it yourself after the
+        call.  The buffer size is the number of bytes held in memory during
+        the copy process.  It defaults to 16KB.
+
+        For secure file saving also have a look at :func:`secure_filename`.
+
+        :param dst: a filename or open file object the uploaded file
+                    is saved to.
+        :param buffer_size: the size of the buffer.  This works the same as
+                            the `length` parameter of
+                            :func:`shutil.copyfileobj`.
+        """
+        from shutil import copyfileobj
+        close_dst = False
+        if isinstance(dst, basestring):
+            dst = file(dst, 'wb')
+            close_dst = True
+        try:
+            copyfileobj(self.stream, dst, buffer_size)
+        finally:
+            if close_dst:
+                dst.close()
+
+    def close(self):
+        """Close the underlying file if possible."""
+        try:
+            self.stream.close()
+        except Exception:
+            pass
+
+    def __nonzero__(self):
+        return bool(self.filename)
+
+    def __getattr__(self, name):
+        return getattr(self.stream, name)
+
+    def __iter__(self):
+        return iter(self.readline, '')
+
+    def __repr__(self):
+        return '<%s: %r (%r)>' % (
+            self.__class__.__name__,
+            self.filename,
+            self.content_type
+        )
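
A self-contained sketch of the :class:`FileStorage` class now living in this
module (it previously lived in ``werkzeug.utils``), using an in-memory
stream::

    from StringIO import StringIO
    from werkzeug.datastructures import FileStorage

    fs = FileStorage(stream=StringIO('hello world'), filename='hello.txt',
                     content_type='text/plain; charset=utf-8')
    assert fs.filename == 'hello.txt'
    assert fs.mimetype == 'text/plain'                  # parameters stripped
    assert fs.mimetype_params == {'charset': 'utf-8'}
    assert fs.read() == 'hello world'                   # proxied to the stream
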
+
+
 # circular dependencies
 from werkzeug.http import dump_options_header, dump_header, generate_etag, \
-     quote_header_value, parse_set_header, unquote_etag
-
-
-# create all the special key errors now that the classes are defined.
-from werkzeug.exceptions import BadRequest
-for _cls in MultiDict, CombinedMultiDict, Headers, EnvironHeaders:
-    _cls.KeyError = BadRequest.wrap(KeyError, _cls.__name__ + '.KeyError')
-del _cls
+     quote_header_value, parse_set_header, unquote_etag, quote_etag, \
+     parse_options_header, http_date, is_byte_range_valid
+from werkzeug.exceptions import BadRequestKeyError
--- a/MoinMoin/support/werkzeug/debug/__init__.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/debug/__init__.py	Sun Dec 25 16:43:04 2011 +0100
@@ -5,16 +5,20 @@
 
     WSGI application traceback debugger.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD, see LICENSE for more details.
 """
 import mimetypes
 from os.path import join, dirname, basename, isfile
 from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response
+from werkzeug.debug.tbtools import get_current_traceback, render_console_html
+from werkzeug.debug.console import Console
+from werkzeug.security import gen_salt
+
+
+#: import this here because it once was documented as being available
+#: from this module.  In case there are users left ...
 from werkzeug.debug.repr import debug_repr
-from werkzeug.debug.tbtools import get_current_traceback
-from werkzeug.debug.console import Console
-from werkzeug.debug.utils import render_template
 
 
 class _ConsoleFrame(object):
@@ -37,6 +41,9 @@
     The `evalex` keyword argument allows evaluating expressions in a
     traceback's frame context.
 
+    .. versionadded:: 0.7
+       The `lodgeit_url` parameter was added.
+
     :param app: the WSGI application to run debugged.
     :param evalex: enable exception evaluation feature (interactive
                    debugging).  This requires a non-forking server.
@@ -50,11 +57,17 @@
     :param show_hidden_frames: by default hidden traceback frames are skipped.
                                You can show them by setting this parameter
                                to `True`.
+    :param lodgeit_url: the base URL of the LodgeIt instance to use for
+                        pasting tracebacks.
     """
 
+    # this class is public
+    __module__ = 'werkzeug'
+
     def __init__(self, app, evalex=False, request_key='werkzeug.request',
                  console_path='/console', console_init_func=None,
-                 show_hidden_frames=False):
+                 show_hidden_frames=False,
+                 lodgeit_url='http://paste.pocoo.org/'):
         if not console_init_func:
             console_init_func = dict
         self.app = app
@@ -65,6 +78,8 @@
         self.console_path = console_path
         self.console_init_func = console_init_func
         self.show_hidden_frames = show_hidden_frames
+        self.lodgeit_url = lodgeit_url
+        self.secret = gen_salt(20)
 
     def debug_application(self, environ, start_response):
         """Run the application and conserve the traceback frames."""
@@ -75,7 +90,7 @@
                 yield item
             if hasattr(app_iter, 'close'):
                 app_iter.close()
-        except:
+        except Exception:
             if hasattr(app_iter, 'close'):
                 app_iter.close()
             traceback = get_current_traceback(skip=1, show_hidden_frames=
@@ -89,17 +104,19 @@
                 start_response('500 INTERNAL SERVER ERROR', [
                     ('Content-Type', 'text/html; charset=utf-8')
                 ])
-            except:
+            except Exception:
                 # if we end up here there has been output but an error
                 # occurred.  in that situation we can do nothing fancy any
                 # more, better log something into the error log and fall
                 # back gracefully.
                 environ['wsgi.errors'].write(
-                    'Debugging middleware catched exception in streamed '
+                    'Debugging middleware caught exception in streamed '
                     'response at a point where response headers were already '
                     'sent.\n')
             else:
-                yield traceback.render_full(evalex=self.evalex) \
+                yield traceback.render_full(evalex=self.evalex,
+                                            lodgeit_url=self.lodgeit_url,
+                                            secret=self.secret) \
                                .encode('utf-8', 'replace')
 
             traceback.log(environ['wsgi.errors'])
@@ -112,13 +129,15 @@
         """Display a standalone shell."""
         if 0 not in self.frames:
             self.frames[0] = _ConsoleFrame(self.console_init_func())
-        return Response(render_template('console.html'), mimetype='text/html')
+        return Response(render_console_html(secret=self.secret),
+                        mimetype='text/html')
 
     def paste_traceback(self, request, traceback):
         """Paste the traceback and return a JSON response."""
-        paste_id = traceback.paste()
-        return Response('{"url": "http://paste.pocoo.org/show/%s/", "id": %s}'
-                        % (paste_id, paste_id), mimetype='application/json')
+        paste_id = traceback.paste(self.lodgeit_url)
+        return Response('{"url": "%sshow/%s/", "id": "%s"}'
+                        % (self.lodgeit_url, paste_id, paste_id),
+                        mimetype='application/json')
 
     def get_source(self, request, frame):
         """Render the source viewer."""
@@ -144,20 +163,23 @@
         # any more!
         request = Request(environ)
         response = self.debug_application
-        if self.evalex and self.console_path is not None and \
-           request.path == self.console_path:
-            response = self.display_console(request)
-        elif request.path.rstrip('/').endswith('/__debugger__'):
+        if request.args.get('__debugger__') == 'yes':
             cmd = request.args.get('cmd')
             arg = request.args.get('f')
+            secret = request.args.get('s')
             traceback = self.tracebacks.get(request.args.get('tb', type=int))
             frame = self.frames.get(request.args.get('frm', type=int))
             if cmd == 'resource' and arg:
                 response = self.get_resource(request, arg)
-            elif cmd == 'paste' and traceback is not None:
+            elif cmd == 'paste' and traceback is not None and \
+                 secret == self.secret:
                 response = self.paste_traceback(request, traceback)
-            elif cmd == 'source' and frame:
+            elif cmd == 'source' and frame and self.secret == secret:
                 response = self.get_source(request, frame)
-            elif self.evalex and cmd is not None and frame is not None:
+            elif self.evalex and cmd is not None and frame is not None and \
+                 self.secret == secret:
                 response = self.execute_command(request, cmd, frame)
+        elif self.evalex and self.console_path is not None and \
+           request.path == self.console_path:
+            response = self.display_console(request)
         return response(environ, start_response)
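
A minimal sketch of wiring the debugger around a WSGI callable, assuming the
bundled package imports as ``werkzeug``; `lodgeit_url` and the per-process
`secret` are handled by the middleware itself::

    from werkzeug.debug import DebuggedApplication

    def application(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['hello\n']

    # evalex enables the interactive console; paste, source and eval
    # commands must carry the generated secret to be accepted
    application = DebuggedApplication(application, evalex=True)
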
--- a/MoinMoin/support/werkzeug/debug/console.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/debug/console.py	Sun Dec 25 16:43:04 2011 +0100
@@ -5,7 +5,7 @@
 
     Interactive console support.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD.
 """
 import sys
@@ -14,7 +14,6 @@
 from werkzeug.utils import escape
 from werkzeug.local import Local
 from werkzeug.debug.repr import debug_repr, dump, helper
-from werkzeug.debug.utils import render_template
 
 
 _local = Local()
@@ -32,6 +31,19 @@
     def close(self):
         pass
 
+    def flush(self):
+        pass
+
+    def seek(self, n, mode=0):
+        pass
+
+    def readline(self):
+        if len(self._buffer) == 0:
+            return ''
+        ret = self._buffer[0]
+        del self._buffer[0]
+        return ret
+
     def reset(self):
         val = ''.join(self._buffer)
         del self._buffer[:]
@@ -53,7 +65,7 @@
     """Thread-local wrapper for sys.stdout for the interactive console."""
 
     def push():
-        if sys.stdout is sys.__stdout__:
+        if not isinstance(sys.stdout, ThreadedStream):
             sys.stdout = ThreadedStream()
         _local.stream = HTMLStringO()
     push = staticmethod(push)
@@ -161,7 +173,7 @@
     def runcode(self, code):
         try:
             exec code in self.globals, self.locals
-        except:
+        except Exception:
             self.showtraceback()
 
     def showtraceback(self):
--- a/MoinMoin/support/werkzeug/debug/render.py	Sun Dec 25 16:38:04 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,103 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    werkzeug.debug.render
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Render the traceback debugging page.
-
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
-    :license: BSD, see LICENSE for more details.
-"""
-import pprint
-from os.path import dirname, join
-
-from werkzeug.templates import Template
-
-
-def get_template(name):
-    return Template.from_file(join(dirname(__file__), 'shared', name),
-                              unicode_mode=False, errors='ignore')
-
-
-def load_resource(res):
-    try:
-        f = file(join(dirname(__file__), 'shared', res))
-    except IOError:
-        return ''
-    try:
-        return f.read()
-    finally:
-        f.close()
-
-
-t_body = get_template('body.tmpl')
-t_codetable = get_template('codetable.tmpl')
-t_vartable = get_template('vartable.tmpl')
-
-
-def code_table(frame):
-    from werkzeug.debug.util import Namespace
-    lines = []
-    lineno = frame['context_lineno']
-    if lineno is not None:
-        lineno += 1
-        for l in frame['pre_context']:
-            lines.append(Namespace(mode='pre', lineno=lineno, code=l))
-            lineno += 1
-        lines.append(Namespace(mode='cur', lineno=lineno,
-                               code=frame['context_line']))
-        lineno += 1
-        for l in frame['post_context']:
-            lines.append(Namespace(mode='post', lineno=lineno, code=l))
-            lineno += 1
-    else:
-        lines.append(Namespace(mode='cur', lineno=1,
-                               code='Sourcecode not available'))
-
-    return t_codetable.render(lines=lines)
-
-
-def var_table(var):
-    def safe_pformat(x):
-        try:
-            lines = pprint.pformat(x).splitlines()
-        except:
-            return '?'
-        tmp = []
-        for line in lines:
-            if len(line) > 79:
-                line = line[:79] + '...'
-            tmp.append(line)
-        return '\n'.join(tmp)
-
-    # dicts
-    if isinstance(var, dict) or hasattr(var, 'items'):
-        value = var.items()
-        if not value:
-            typ = 'empty'
-        else:
-            typ = 'dict'
-            value.sort()
-            value = [(repr(key), safe_pformat(val)) for key, val in value]
-
-    # lists
-    elif isinstance(var, list):
-        if not var:
-            typ = 'empty'
-        else:
-            typ = 'list'
-        value = [safe_pformat(item) for item in var]
-
-    # others
-    else:
-        typ = 'simple'
-        value = repr(var)
-
-    return t_vartable.render(type=typ, value=value)
-
-
-def debug_page(context):
-    tc = context.to_dict()
-    tc['var_table'] = var_table
-    tc['code_table'] = code_table
-    return t_body.render(tc)
--- a/MoinMoin/support/werkzeug/debug/repr.py	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/debug/repr.py	Sun Dec 25 16:43:04 2011 +0100
@@ -10,7 +10,7 @@
     Together with the CSS and JavaScript files of the debugger this gives
     a colorful and more compact output.
 
-    :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
+    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
     :license: BSD.
 """
 import sys
@@ -18,10 +18,9 @@
 from traceback import format_exception_only
 try:
     from collections import deque
-except ImportError:
+except ImportError: # pragma: no cover
     deque = None
 from werkzeug.utils import escape
-from werkzeug.debug.utils import render_template
 
 
 missing = object()
@@ -29,6 +28,21 @@
 RegexType = type(_paragraph_re)
 
 
+HELP_HTML = '''\
+<div class=box>
+  <h3>%(title)s</h3>
+  <pre class=help>%(text)s</pre>
+</div>\
+'''
+OBJECT_DUMP_HTML = '''\
+<div class=box>
+  <h3>%(title)s</h3>
+  %(repr)s
+  <table>%(items)s</table>
+</div>\
+'''
+
+
 def debug_repr(obj):
     """Creates a debug repr of an object as HTML unicode string."""
     return DebugReprGenerator().repr(obj)
@@ -51,23 +65,25 @@
     debugger only because it requires a patched sys.stdout.
     """
 
-    def __call__(self, topic=None):
-        sys.stdout._write(self.get_help(topic))
+    def __repr__(self):
+        return 'Type help(object) for help about object.'
 
-    def get_help(self, topic):
-        title = text = None
-        if topic is not None:
-            import pydoc
-            pydoc.help(topic)
-            rv = sys.stdout.reset().decode('utf-8', 'ignore')
-            paragraphs = _paragraph_re.split(rv)
-            if len(paragraphs) > 1:
-                title = paragraphs[0]
-                text = '\n\n'.join(paragraphs[1:])
-            else:
-                title = 'Help'
-                text = paragraphs[0]
-        return render_template('help_command.html', title=title, text=text)
+    def __call__(self, topic=None):
+        if topic is None:
+            sys.stdout._write('<span class=help>%s</span>' % repr(self))
+            return
+        import pydoc
+        pydoc.help(topic)
+        rv = sys.stdout.reset().decode('utf-8', 'ignore')
+        paragraphs = _paragraph_re.split(rv)
+        if len(paragraphs) > 1:
+            title = paragraphs[0]
+            text = '\n\n'.join(paragraphs[1:])
+        else: # pragma: no cover
+            title = 'Help'
+            text = paragraphs[0]
+        sys.stdout._write(HELP_HTML % {'title': title, 'text': text})
+
 
 helper = _Helper()
 
@@ -167,7 +183,7 @@
 
     def dispatch_repr(self, obj, recursive):
         if obj is helper:
-            return helper.get_help(None)
+            return u'<span class="help">%r</span>' % helper
         if isinstance(obj, (int, long, float, complex)):
             return u'<span class="number">%r</span>' % obj
         if isinstance(obj, basestring):
@@ -191,7 +207,7 @@
     def fallback_repr(self):
         try:
             info = ''.join(format_exception_only(*sys.exc_info()[:2]))
-        except:
+        except Exception: # pragma: no cover
             info = '?'
         return u'<span class="brokenrepr">&lt;broken repr (%s)&gt;' \
                u'</span>' % escape(info.decode('utf-8', 'ignore').strip())
@@ -206,7 +222,7 @@
         try:
             try:
                 return self.dispatch_repr(obj, recursive)
-            except:
+            except Exception:
                 return self.fallback_repr()
         finally:
             self._stack.pop()
@@ -227,14 +243,25 @@
             for key in dir(obj):
                 try:
                     items.append((key, self.repr(getattr(obj, key))))
-                except:
+                except Exception:
                     pass
             title = 'Details for'
         title += ' ' + object.__repr__(obj)[1:-1]
-        return render_template('dump_object.html', items=items,
-                               title=title, repr=repr)
+        return self.render_object_dump(items, title, repr)
 
     def dump_locals(self, d):
         items = [(key, self.repr(value)) for key, value in d.items()]
-        return render_template('dump_object.html', items=items,
-                               title='Local variables in frame', repr=None)
+        return self.render_object_dump(items, 'Local variables in frame')
+
+    def render_object_dump(self, items, title, repr=None):
+        html_items = []
+        for key, value in items:
+            html_items.append('<tr><th>%s<td><pre class=repr>%s</pre>' %
+                              (escape(key), value))
+        if not html_items:
+            html_items.append('<tr><td><em>Nothing</em>')
+        return OBJECT_DUMP_HTML % {
+            'title':    escape(title),
+            'repr':     repr and '<pre class=repr>%s</pre>' % repr or '',
+            'items':    '\n'.join(html_items)
+        }
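
The repr.py hunks above drop the render_template() dependency and build the console HTML from the inline HELP_HTML / OBJECT_DUMP_HTML strings instead. A short usage sketch, assuming the bundled werkzeug package is importable as plain `werkzeug` (MoinMoin ships it under MoinMoin/support/); the example values are arbitrary:

    # debug_repr() returns the syntax-highlighted HTML snippet the in-browser
    # console embeds for a single value; dump_locals() renders a whole
    # namespace as one OBJECT_DUMP_HTML table (the kind of output the
    # console's dump() helper shows).
    from werkzeug.debug.repr import debug_repr, DebugReprGenerator

    print debug_repr([1, 'two', {'three': 3}])

    gen = DebugReprGenerator()
    print gen.dump_locals({'answer': 42, 'name': u'moin'})
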
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/support/werkzeug/debug/shared/FONT_LICENSE	Sun Dec 25 16:43:04 2011 +0100
@@ -0,0 +1,96 @@
+-------------------------------
+UBUNTU FONT LICENCE Version 1.0
+-------------------------------
+
+PREAMBLE
+This licence allows the licensed fonts to be used, studied, modified and
+redistributed freely. The fonts, including any derivative works, can be
+bundled, embedded, and redistributed provided the terms of this licence
+are met. The fonts and derivatives, however, cannot be released under
+any other licence. The requirement for fonts to remain under this
+licence does not require any document created using the fonts or their
+derivatives to be published under this licence, as long as the primary
+purpose of the document is not to be a vehicle for the distribution of
+the fonts.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this licence and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Original Version" refers to the collection of Font Software components
+as received under this licence.
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to
+a new environment.
+
+"Copyright Holder(s)" refers to all individuals and companies who have a
+copyright ownership of the Font Software.
+
+"Substantially Changed" refers to Modified Versions which can be easily
+identified as dissimilar to the Font Software by users of the Font
+Software comparing the Original Version with the Modified Version.
+
+To "Propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification and with or without charging
+a redistribution fee), making available to the public, and in some
+countries other activities as well.
+
+PERMISSION & CONDITIONS
+This licence does not grant any rights under trademark law and all such
+rights are reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of the Font Software, to propagate the Font Software, subject to
+the below conditions:
+
+1) Each copy of the Font Software must contain the above copyright
+notice and this licence. These can be included either as stand-alone
+text files, human-readable headers or in the appropriate machine-
+readable metadata fields within text or binary files as long as those
+fields can be easily viewed by the user.
+
+2) The font name complies with the following:
+(a) The Original Version must retain its name, unmodified.
+(b) Modified Versions which are Substantially Changed must be renamed to
+avoid use of the name of the Original Version or similar names entirely.
+(c) Modified Versions which are not Substantially Changed must be
+renamed to both (i) retain the name of the Original Version and (ii) add
+additional naming elements to distinguish the Modified Version from the
+Original Version. The name of such Modified Versions must be the name of
+the Original Version, with "derivative X" where X represents the name of
+the new work, appended to that name.
+
+3) The name(s) of the Copyright Holder(s) and any contributor to the
+Font Software shall not be used to promote, endorse or advertise any
+Modified Version, except (i) as required by this licence, (ii) to
+acknowledge the contribution(s) of the Copyright Holder(s) or (iii) with
+their explicit written permission.
+
+4) The Font Software, modified or unmodified, in part or in whole, must
+be distributed entirely under this licence, and must not be distributed
+under any other licence. The requirement for fonts to remain under this
+licence does not affect any document created using the Font Software,
+except any version of the Font Software extracted from a document
+created using the Font Software may only be distributed under this
+licence.
+
+TERMINATION
+This licence becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
+COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER
+DEALINGS IN THE FONT SOFTWARE.
--- a/MoinMoin/support/werkzeug/debug/shared/body.tmpl	Sun Dec 25 16:38:04 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,81 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
-  "http://www.w3.org/TR/html4/loose.dtd">
-<html>
-  <head>
-    <title>$escape(exception_type) in $escape(last_frame['basename']) (Werkzeug Debugger)</title>
-    <link rel="stylesheet" href="__traceback__?resource=style.css&amp;mimetype=text/css" type="text/css">
-    <script type="text/javascript" src="__traceback__?resource=jquery.js&amp;mimetype=text/javascript"></script>
-    <script type="text/javascript" src="__traceback__?resource=debugger.js&amp;mimetype=text/javascript"></script>
-  </head>
-  <body>
-    <div class="traceback_wrapper">
-      <h1>$escape(exception_type)</h1>
-      <p class="errormsg">$escape(exception_value)</p>
-
-      <p class="errorline">
-        $escape(last_frame['filename']) in
-        $escape(last_frame['function']),
-        line $last_frame['lineno']
-      </p>
-
-      <h2 onclick="changeTB()" class="tb">Traceback <span>(toggle raw view)</span></h2>
-      <div id="interactive">
-        <p class="text">A problem occurred in your Python WSGI application.
-          Here is the sequence of function calls leading up to the error, in the order
-          they occurred. Activate a code line to toggle context lines.</p>
-
-      <% for num, frame in enumerate(frames) %>
-        <div class="frame" id="frame-$num">
-          <h3 class="fn"><em>$escape(frame['function'])</em> in <tt>$escape(frame['filename'])</tt></h3>
-          <a class="locals" href="javascript:toggleFrameVars($num)">[inspect]</a>
-          <% if evalex %><a class="eval" href="javascript:toggleInterpreter($num)">[console]</a><% endif %>
-          $code_table(frame)
-          $var_table(frame['vars'])
-          <% if evalex %>
-            <form class="exec_code" action="">
-              <pre class="output">[console ready]</pre>
-              <input type="hidden" name="tb" value="$tb_uid">
-              <input type="hidden" name="frame" value="$frame['frame_uid']">
-              <input type="text" name="cmd" class="input" value="">
-            </form>
-          <% endif %>
-        </div>
-      <% endfor %>
-      </div>
-
-      <div id="plain">
-        <p class="text">Here is the plain Python traceback for copy and paste:</p>
-        <pre class="plain">$escape(plaintb)</pre>
-        <p class="text pastebininfo">
-          <a href="javascript:pasteIt()">Create a new Paste</a> with
-          this traceback in the lodgeit pastebin.
-        </p>
-      </div>
-
-      <% if req_vars %>
-        <h2>Request Data</h2>
-        <p class="text">The following list contains all important request variables.
-          Select a header to expand the list.</p>
-        <% for num, (key, info) in enumerate(req_vars) %>
-          <dl>
-            <dt onclick="toggleTableVars($num)">$escape(key)</dt>
-            <dd id="tvar-$num">$var_table(info)</dd>
-          </dl>
-        <% endfor %>
-      <% endif %>
-    </div>
-
-    <div id="footer">
-      Brought to you by <span class="arthur">DON'T PANIC</span>, your friendly
-      Werkzeug powered traceback interpreter.
-    </div>
-  </body>
-</html>
-
-<!-- Plain traceback:
-
-<%py
-  import re
-  print re.sub('-{2,}', '-', plaintb)
-%>
--->
--- a/MoinMoin/support/werkzeug/debug/shared/codetable.tmpl	Sun Dec 25 16:38:04 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-<table class="code">
-<% for line in lines %>
-  <tr class="$line.mode">
-    <td class="lineno">$line.lineno</td>
-    <td class="code">$line.code</td>
-  </tr>
-<% endfor %>
-</table>
--- a/MoinMoin/support/werkzeug/debug/shared/debugger.js	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/debug/shared/debugger.js	Sun Dec 25 16:43:04 2011 +0100
@@ -21,7 +21,7 @@
      * Add an interactive console to the frames
      */
     if (EVALEX)
-      $('<img src="./__debugger__?cmd=resource&f=console.png">')
+      $('<img src="?__debugger__=yes&cmd=resource&f=console.png">')
         .attr('title', 'Open an interactive python shell in this frame')
         .click(function() {
           consoleNode = openShell(consoleNode, target, frameID);
@@ -32,7 +32,7 @@
     /**
      * Show sourcecode
      */
-    var sourceButton = $('<img src="./__debugger__?cmd=resource&f=source.png">')
+    var sourceButton = $('<img src="?__debugger__=yes&cmd=resource&f=source.png">')
       .attr('title', 'Display the sourcecode for this frame')
       .click(function() {
         if (!sourceView)
@@ -44,7 +44,8 @@
             .click(function() {
               sourceView.slideUp('fast');
             });
-        $.get('./__debugger__', {cmd: 'source', frm: frameID}, function(data) {
+        $.get(document.location.pathname, {__debugger__: 'yes', cmd:
+            'source', frm: frameID, s: SECRET}, function(data) {
           $('table', sourceView)
             .replaceWith(data);
           if (!sourceView.is(':visible'))
@@ -76,13 +77,13 @@
     .removeClass('nojavascript')
     .html('<p>To switch between the interactive traceback and the plaintext ' +
           'one, you can click on the "Traceback" headline.  From the text ' +
-          'traceback you can also create a paste of it.  For code execution ' +
-          'mouse-over the frame you want to debug and click on the console ' +
-          'icon on the right side.' +
+          'traceback you can also create a paste of it. ' + (!EVALEX ? '' :
+          'For code execution mouse-over the frame you want to debug and ' +
+          'click on the console icon on the right side.' +
           '<p>You can execute arbitrary Python code in the stack frames and ' +
           'there are some extra helpers available for introspection:' +
           '<ul><li><code>dump()</code> shows all variables in the frame' +
-          '<li><code>dump(obj)</code> dumps all what\'s know about the object</ul>');
+          '<li><code>dump(obj)</code> dumps all that\'s known about the object</ul>'));
 
   /**
    * Add the pastebin feature
@@ -94,8 +95,9 @@
       label.val('submitting...');
       $.ajax({
         dataType:     'json',
-        url:          './__debugger__',
-        data:         {tb: TRACEBACK, cmd: 'paste'},
+        url:          document.location.pathname,
+        data:         {__debugger__: 'yes', tb: TRACEBACK, cmd: 'paste',
+                       s: SECRET},
         success:      function(data) {
           $('div.plain span.pastemessage')
             .removeClass('pastemessage')
@@ -132,7 +134,8 @@
   var form = $('<form>&gt;&gt;&gt; </form>')
     .submit(function() {
       var cmd = command.val();
-      $.get('./__debugger__', {cmd: cmd, frm: frameID}, function(data) {
+      $.get(document.location.pathname, {
+          __debugger__: 'yes', cmd: cmd, frm: frameID, s: SECRET}, function(data) {
         var tmp = $('<div>').html(data);
         $('span.extended', tmp).each(function() {
           var hidden = $(this).wrap('<span>').hide();
@@ -147,6 +150,7 @@
         });
         output.append(tmp);
         command.focus();
+        consoleNode.scrollTop(command.position().top);
         var old = history.pop();
         history.push(cmd);
         if (typeof old != 'undefined')
@@ -160,7 +164,7 @@
 
   var command = $('<input type="text">')
     .appendTo(form)
-    .keypress(function(e) {
+    .keydown(function(e) {
       if (e.charCode == 100 && e.ctrlKey) {
         output.text('--- screen cleared ---');
         return false;
@@ -191,6 +195,6 @@
       break
     line = tmp;
   }
-  var container = $('div.sourceview')[0];
-  container.scrollTop = line.offset().top - container.offsetTop;
+  var container = $('div.sourceview');
+  container.scrollTop(line.offset().top);
 }
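
The debugger.js hunks above stop talking to a dedicated ./__debugger__ path and instead send every command to the current page URL, identified only by query-string arguments (__debugger__=yes, cmd, frm, s=SECRET). A rough sketch of the server-side check those URLs assume, using urlparse.parse_qs from Python 2.6+; this is not the actual werkzeug middleware, and is_debugger_request is a hypothetical name:

    # Hypothetical dispatch sketch: a request is treated as a debugger command
    # only when __debugger__=yes, a cmd is present and the shared secret matches.
    from urlparse import parse_qs

    def is_debugger_request(environ, secret):
        args = parse_qs(environ.get('QUERY_STRING', ''))
        return (args.get('__debugger__') == ['yes']
                and 'cmd' in args
                and args.get('s') == [secret])

    env = {'QUERY_STRING': '__debugger__=yes&cmd=source&frm=3&s=abc123'}
    print is_debugger_request(env, 'abc123')   # -> True
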
--- a/MoinMoin/support/werkzeug/debug/shared/jquery.js	Sun Dec 25 16:38:04 2011 +0100
+++ b/MoinMoin/support/werkzeug/debug/shared/jquery.js	Sun Dec 25 16:43:04 2011 +0100
@@ -1,19 +1,167 @@
-/*
- * jQuery JavaScript Library v1.3.2
+/*!
+ * jQuery JavaScript Library v1.4.4
  * http://jquery.com/
  *
- * Copyright (c) 2009 John Resig
- * Dual licensed under the MIT and GPL licenses.
- * http://docs.jquery.com/License
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
  *
- * Date: 2009-02-19 17:34:21 -0500 (Thu, 19 Feb 2009)
- * Revision: 6246
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Thu Nov 11 19:04:53 2010 -0500
  */
-(function(){var l=this,g,y=l.jQuery,p=l.$,o=l.jQuery=l.$=function(E,F){return new o.fn.init(E,F)},D=/^[^<]*(<(.|\s)+>)[^>]*$|^#([\w-]+)$/,f=/^.[^:#\[\.,]*$/;o.fn=o.prototype={init:function(E,H){E=E||document;if(E.nodeType){this[0]=E;this.length=1;this.context=E;return this}if(typeof E==="string"){var G=D.exec(E);if(G&&(G[1]||!H)){if(G[1]){E=o.clean([G[1]],H)}else{var I=document.getElementById(G[3]);if(I&&I.id!=G[3]){return o().find(E)}var F=o(I||[]);F.context=document;F.selector=E;return F}}else{return o(H).find(E)}}else{if(o.isFunction(E)){return o(document).ready(E)}}if(E.selector&&E.context){this.selector=E.selector;this.context=E.context}return this.setArray(o.isArray(E)?E:o.makeArray(E))},selector:"",jquery:"1.3.2",size:function(){return this.length},get:function(E){return E===g?Array.prototype.slice.call(this):this[E]},pushStack:function(F,H,E){var G=o(F);G.prevObject=this;G.context=this.context;if(H==="find"){G.selector=this.selector+(this.selector?" ":"")+E}else{if(H){G.selector=this.selector+"."+H+"("+E+")"}}return G},setArray:function(E){this.length=0;Array.prototype.push.apply(this,E);return this},each:function(F,E){return o.each(this,F,E)},index:function(E){return o.inArray(E&&E.jquery?E[0]:E,this)},attr:function(F,H,G){var E=F;if(typeof F==="string"){if(H===g){return this[0]&&o[G||"attr"](this[0],F)}else{E={};E[F]=H}}return this.each(function(I){for(F in E){o.attr(G?this.style:this,F,o.prop(this,E[F],G,I,F))}})},css:function(E,F){if((E=="width"||E=="height")&&parseFloat(F)<0){F=g}return this.attr(E,F,"curCSS")},text:function(F){if(typeof F!=="object"&&F!=null){return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(F))}var E="";o.each(F||this,function(){o.each(this.childNodes,function(){if(this.nodeType!=8){E+=this.nodeType!=1?this.nodeValue:o.fn.text([this])}})});return E},wrapAll:function(E){if(this[0]){var F=o(E,this[0].ownerDocument).clone();if(this[0].parentNode){F.insertBefore(this[0])}F.map(function(){var G=this;while(G.firstChild){G=G.firstChild}return G}).append(this)}return this},wrapInner:function(E){return this.each(function(){o(this).contents().wrapAll(E)})},wrap:function(E){return this.each(function(){o(this).wrapAll(E)})},append:function(){return this.domManip(arguments,true,function(E){if(this.nodeType==1){this.appendChild(E)}})},prepend:function(){return this.domManip(arguments,true,function(E){if(this.nodeType==1){this.insertBefore(E,this.firstChild)}})},before:function(){return this.domManip(arguments,false,function(E){this.parentNode.insertBefore(E,this)})},after:function(){return this.domManip(arguments,false,function(E){this.parentNode.insertBefore(E,this.nextSibling)})},end:function(){return this.prevObject||o([])},push:[].push,sort:[].sort,splice:[].splice,find:function(E){if(this.length===1){var F=this.pushStack([],"find",E);F.length=0;o.find(E,this[0],F);return F}else{return this.pushStack(o.unique(o.map(this,function(G){return o.find(E,G)})),"find",E)}},clone:function(G){var E=this.map(function(){if(!o.support.noCloneEvent&&!o.isXMLDoc(this)){var I=this.outerHTML;if(!I){var J=this.ownerDocument.createElement("div");J.appendChild(this.cloneNode(true));I=J.innerHTML}return o.clean([I.replace(/ jQuery\d+="(?:\d+|null)"/g,"").replace(/^\s*/,"")])[0]}else{return this.cloneNode(true)}});if(G===true){var H=this.find("*").andSelf(),F=0;E.find("*").andSelf().each(function(){if(this.nodeName!==H[F].nodeName){return}var I=o.data(H[F],"events");for(var K in I){for(var J in I[K]){o.event.add(this,K,I[K][J],I[K][J].data)}}F++})}return 
E},filter:function(E){return this.pushStack(o.isFunction(E)&&o.grep(this,function(G,F){return E.call(G,F)})||o.multiFilter(E,o.grep(this,function(F){return F.nodeType===1})),"filter",E)},closest:function(E){var G=o.expr.match.POS.test(E)?o(E):null,F=0;return this.map(function(){var H=this;while(H&&H.ownerDocument){if(G?G.index(H)>-1:o(H).is(E)){o.data(H,"closest",F);return H}H=H.parentNode;F++}})},not:function(E){if(typeof E==="string"){if(f.test(E)){return this.pushStack(o.multiFilter(E,this,true),"not",E)}else{E=o.multiFilter(E,this)}}var F=E.length&&E[E.length-1]!==g&&!E.nodeType;return this.filter(function(){return F?o.inArray(this,E)<0:this!=E})},add:function(E){return this.pushStack(o.unique(o.merge(this.get(),typeof E==="string"?o(E):o.makeArray(E))))},is:function(E){return !!E&&o.multiFilter(E,this).length>0},hasClass:function(E){return !!E&&this.is("."+E)},val:function(K){if(K===g){var E=this[0];if(E){if(o.nodeName(E,"option")){return(E.attributes.value||{}).specified?E.value:E.text}if(o.nodeName(E,"select")){var I=E.selectedIndex,L=[],M=E.options,H=E.type=="select-one";if(I<0){return null}for(var F=H?I:0,J=H?I+1:M.length;F<J;F++){var G=M[F];if(G.selected){K=o(G).val();if(H){return K}L.push(K)}}return L}return(E.value||"").replace(/\r/g,"")}return g}if(typeof K==="number"){K+=""}return this.each(function(){if(this.nodeType!=1){return}if(o.isArray(K)&&/radio|checkbox/.test(this.type)){this.checked=(o.inArray(this.value,K)>=0||o.inArray(this.name,K)>=0)}else{if(o.nodeName(this,"select")){var N=o.makeArray(K);o("option",this).each(function(){this.selected=(o.inArray(this.value,N)>=0||o.inArray(this.text,N)>=0)});if(!N.length){this.selectedIndex=-1}}else{this.value=K}}})},html:function(E){return E===g?(this[0]?this[0].innerHTML.replace(/ jQuery\d+="(?:\d+|null)"/g,""):null):this.empty().append(E)},replaceWith:function(E){return this.after(E).remove()},eq:function(E){return this.slice(E,+E+1)},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments),"slice",Array.prototype.slice.call(arguments).join(","))},map:function(E){return this.pushStack(o.map(this,function(G,F){return E.call(G,F,G)}))},andSelf:function(){return this.add(this.prevObject)},domManip:function(J,M,L){if(this[0]){var I=(this[0].ownerDocument||this[0]).createDocumentFragment(),F=o.clean(J,(this[0].ownerDocument||this[0]),I),H=I.firstChild;if(H){for(var G=0,E=this.length;G<E;G++){L.call(K(this[G],H),this.length>1||G>0?I.cloneNode(true):I)}}if(F){o.each(F,z)}}return this;function K(N,O){return M&&o.nodeName(N,"table")&&o.nodeName(O,"tr")?(N.getElementsByTagName("tbody")[0]||N.appendChild(N.ownerDocument.createElement("tbody"))):N}}};o.fn.init.prototype=o.fn;function z(E,F){if(F.src){o.ajax({url:F.src,async:false,dataType:"script"})}else{o.globalEval(F.text||F.textContent||F.innerHTML||"")}if(F.parentNode){F.parentNode.removeChild(F)}}function e(){return +new Date}o.extend=o.fn.extend=function(){var J=arguments[0]||{},H=1,I=arguments.length,E=false,G;if(typeof J==="boolean"){E=J;J=arguments[1]||{};H=2}if(typeof J!=="object"&&!o.isFunction(J)){J={}}if(I==H){J=this;--H}for(;H<I;H++){if((G=arguments[H])!=null){for(var F in G){var K=J[F],L=G[F];if(J===L){continue}if(E&&L&&typeof L==="object"&&!L.nodeType){J[F]=o.extend(E,K||(L.length!=null?[]:{}),L)}else{if(L!==g){J[F]=L}}}}}return J};var b=/z-?index|font-?weight|opacity|zoom|line-?height/i,q=document.defaultView||{},s=Object.prototype.toString;o.extend({noConflict:function(E){l.$=p;if(E){l.jQuery=y}return o},isFunction:function(E){return 
s.call(E)==="[object Function]"},isArray:function(E){return s.call(E)==="[object Array]"},isXMLDoc:function(E){return E.nodeType===9&&E.documentElement.nodeName!=="HTML"||!!E.ownerDocument&&o.isXMLDoc(E.ownerDocument)},globalEval:function(G){if(G&&/\S/.test(G)){var F=document.getElementsByTagName("head")[0]||document.documentElement,E=document.createElement("script");E.type="text/javascript";if(o.support.scriptEval){E.appendChild(document.createTextNode(G))}else{E.text=G}F.insertBefore(E,F.firstChild);F.removeChild(E)}},nodeName:function(F,E){return F.nodeName&&F.nodeName.toUpperCase()==E.toUpperCase()},each:function(G,K,F){var E,H=0,I=G.length;if(F){if(I===g){for(E in G){if(K.apply(G[E],F)===false){break}}}else{for(;H<I;){if(K.apply(G[H++],F)===false){break}}}}else{if(I===g){for(E in G){if(K.call(G[E],E,G[E])===false){break}}}else{for(var J=G[0];H<I&&K.call(J,H,J)!==false;J=G[++H]){}}}return G},prop:function(H,I,G,F,E){if(o.isFunction(I)){I=I.call(H,F)}return typeof I==="number"&&G=="curCSS"&&!b.test(E)?I+"px":I},className:{add:function(E,F){o.each((F||"").split(/\s+/),function(G,H){if(E.nodeType==1&&!o.className.has(E.className,H)){E.className+=(E.className?" ":"")+H}})},remove:function(E,F){if(E.nodeType==1){E.className=F!==g?o.grep(E.className.split(/\s+/),function(G){return !o.className.has(F,G)}).join(" "):""}},has:function(F,E){return F&&o.inArray(E,(F.className||F).toString().split(/\s+/))>-1}},swap:function(H,G,I){var E={};for(var F in G){E[F]=H.style[F];H.style[F]=G[F]}I.call(H);for(var F in G){H.style[F]=E[F]}},css:function(H,F,J,E){if(F=="width"||F=="height"){var L,G={position:"absolute",visibility:"hidden",display:"block"},K=F=="width"?["Left","Right"]:["Top","Bottom"];function I(){L=F=="width"?H.offsetWidth:H.offsetHeight;if(E==="border"){return}o.each(K,function(){if(!E){L-=parseFloat(o.curCSS(H,"padding"+this,true))||0}if(E==="margin"){L+=parseFloat(o.curCSS(H,"margin"+this,true))||0}else{L-=parseFloat(o.curCSS(H,"border"+this+"Width",true))||0}})}if(H.offsetWidth!==0){I()}else{o.swap(H,G,I)}return Math.max(0,Math.round(L))}return o.curCSS(H,F,J)},curCSS:function(I,F,G){var L,E=I.style;if(F=="opacity"&&!o.support.opacity){L=o.attr(E,"opacity");return L==""?"1":L}if(F.match(/float/i)){F=w}if(!G&&E&&E[F]){L=E[F]}else{if(q.getComputedStyle){if(F.match(/float/i)){F="float"}F=F.replace(/([A-Z])/g,"-$1").toLowerCase();var M=q.getComputedStyle(I,null);if(M){L=M.getPropertyValue(F)}if(F=="opacity"&&L==""){L="1"}}else{if(I.currentStyle){var J=F.replace(/\-(\w)/g,function(N,O){return O.toUpperCase()});L=I.currentStyle[F]||I.currentStyle[J];if(!/^\d+(px)?$/i.test(L)&&/^\d/.test(L)){var H=E.left,K=I.runtimeStyle.left;I.runtimeStyle.left=I.currentStyle.left;E.left=L||0;L=E.pixelLeft+"px";E.left=H;I.runtimeStyle.left=K}}}}return L},clean:function(F,K,I){K=K||document;if(typeof K.createElement==="undefined"){K=K.ownerDocument||K[0]&&K[0].ownerDocument||document}if(!I&&F.length===1&&typeof F[0]==="string"){var H=/^<(\w+)\s*\/?>$/.exec(F[0]);if(H){return[K.createElement(H[1])]}}var G=[],E=[],L=K.createElement("div");o.each(F,function(P,S){if(typeof S==="number"){S+=""}if(!S){return}if(typeof S==="string"){S=S.replace(/(<(\w+)[^>]*?)\/>/g,function(U,V,T){return T.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?U:V+"></"+T+">"});var O=S.replace(/^\s+/,"").substring(0,10).toLowerCase();var Q=!O.indexOf("<opt")&&[1,"<select 
multiple='multiple'>","</select>"]||!O.indexOf("<leg")&&[1,"<fieldset>","</fieldset>"]||O.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"<table>","</table>"]||!O.indexOf("<tr")&&[2,"<table><tbody>","</tbody></table>"]||(!O.indexOf("<td")||!O.indexOf("<th"))&&[3,"<table><tbody><tr>","</tr></tbody></table>"]||!O.indexOf("<col")&&[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"]||!o.support.htmlSerialize&&[1,"div<div>","</div>"]||[0,"",""];L.innerHTML=Q[1]+S+Q[2];while(Q[0]--){L=L.lastChild}if(!o.support.tbody){var R=/<tbody/i.test(S),N=!O.indexOf("<table")&&!R?L.firstChild&&L.firstChild.childNodes:Q[1]=="<table>"&&!R?L.childNodes:[];for(var M=N.length-1;M>=0;--M){if(o.nodeName(N[M],"tbody")&&!N[M].childNodes.length){N[M].parentNode.removeChild(N[M])}}}if(!o.support.leadingWhitespace&&/^\s/.test(S)){L.insertBefore(K.createTextNode(S.match(/^\s*/)[0]),L.firstChild)}S=o.makeArray(L.childNodes)}if(S.nodeType){G.push(S)}else{G=o.merge(G,S)}});if(I){for(var J=0;G[J];J++){if(o.nodeName(G[J],"script")&&(!G[J].type||G[J].type.toLowerCase()==="text/javascript")){E.push(G[J].parentNode?G[J].parentNode.removeChild(G[J]):G[J])}else{if(G[J].nodeType===1){G.splice.apply(G,[J+1,0].concat(o.makeArray(G[J].getElementsByTagName("script"))))}I.appendChild(G[J])}}return E}return G},attr:function(J,G,K){if(!J||J.nodeType==3||J.nodeType==8){return g}var H=!o.isXMLDoc(J),L=K!==g;G=H&&o.props[G]||G;if(J.tagName){var F=/href|src|style/.test(G);if(G=="selected"&&J.parentNode){J.parentNode.selectedIndex}if(G in J&&H&&!F){if(L){if(G=="type"&&o.nodeName(J,"input")&&J.parentNode){throw"type property can't be changed"}J[G]=K}if(o.nodeName(J,"form")&&J.getAttributeNode(G)){return J.getAttributeNode(G).nodeValue}if(G=="tabIndex"){var I=J.getAttributeNode("tabIndex");return I&&I.specified?I.value:J.nodeName.match(/(button|input|object|select|textarea)/i)?0:J.nodeName.match(/^(a|area)$/i)&&J.href?0:g}return J[G]}if(!o.support.style&&H&&G=="style"){return o.attr(J.style,"cssText",K)}if(L){J.setAttribute(G,""+K)}var E=!o.support.hrefNormalized&&H&&F?J.getAttribute(G,2):J.getAttribute(G);return E===null?g:E}if(!o.support.opacity&&G=="opacity"){if(L){J.zoom=1;J.filter=(J.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(K)+""=="NaN"?"":"alpha(opacity="+K*100+")")}return J.filter&&J.filter.indexOf("opacity=")>=0?(parseFloat(J.filter.match(/opacity=([^)]*)/)[1])/100)+"":""}G=G.replace(/-([a-z])/ig,function(M,N){return N.toUpperCase()});if(L){J[G]=K}return J[G]},trim:function(E){return(E||"").replace(/^\s+|\s+$/g,"")},makeArray:function(G){var E=[];if(G!=null){var F=G.length;if(F==null||typeof G==="string"||o.isFunction(G)||G.setInterval){E[0]=G}else{while(F){E[--F]=G[F]}}}return E},inArray:function(G,H){for(var E=0,F=H.length;E<F;E++){if(H[E]===G){return E}}return -1},merge:function(H,E){var F=0,G,I=H.length;if(!o.support.getAll){while((G=E[F++])!=null){if(G.nodeType!=8){H[I++]=G}}}else{while((G=E[F++])!=null){H[I++]=G}}return H},unique:function(K){var F=[],E={};try{for(var G=0,H=K.length;G<H;G++){var J=o.data(K[G]);if(!E[J]){E[J]=true;F.push(K[G])}}}catch(I){F=K}return F},grep:function(F,J,E){var G=[];for(var H=0,I=F.length;H<I;H++){if(!E!=!J(F[H],H)){G.push(F[H])}}return G},map:function(E,J){var F=[];for(var G=0,H=E.length;G<H;G++){var I=J(E[G],G);if(I!=null){F[F.length]=I}}return F.concat.apply([],F)}});var C=navigator.userAgent.toLowerCase();o.browser={version:(C.match(/.+(?:rv|it|ra|ie)[\/: 
]([\d.]+)/)||[0,"0"])[1],safari:/webkit/.test(C),opera:/opera/.test(C),msie:/msie/.test(C)&&!/opera/.test(C),mozilla:/mozilla/.test(C)&&!/(compatible|webkit)/.test(C)};o.each({parent:function(E){return E.parentNode},parents:function(E){return o.dir(E,"parentNode")},next:function(E){return o.nth(E,2,"nextSibling")},prev:function(E){return o.nth(E,2,"previousSibling")},nextAll:function(E){return o.dir(E,"nextSibling")},prevAll:function(E){return o.dir(E,"previousSibling")},siblings:function(E){return o.sibling(E.parentNode.firstChild,E)},children:function(E){return o.sibling(E.firstChild)},contents:function(E){return o.nodeName(E,"iframe")?E.contentDocument||E.contentWindow.document:o.makeArray(E.childNodes)}},function(E,F){o.fn[E]=function(G){var H=o.map(this,F);if(G&&typeof G=="string"){H=o.multiFilter(G,H)}return this.pushStack(o.unique(H),E,G)}});o.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(E,F){o.fn[E]=function(G){var J=[],L=o(G);for(var K=0,H=L.length;K<H;K++){var I=(K>0?this.clone(true):this).get();o.fn[F].apply(o(L[K]),I);J=J.concat(I)}return this.pushStack(J,E,G)}});o.each({removeAttr:function(E){o.attr(this,E,"");if(this.nodeType==1){this.removeAttribute(E)}},addClass:function(E){o.className.add(this,E)},removeClass:function(E){o.className.remove(this,E)},toggleClass:function(F,E){if(typeof E!=="boolean"){E=!o.className.has(this,F)}o.className[E?"add":"remove"](this,F)},remove:function(E){if(!E||o.filter(E,[this]).length){o("*",this).add([this]).each(function(){o.event.remove(this);o.removeData(this)});if(this.parentNode){this.parentNode.removeChild(this)}}},empty:function(){o(this).children().remove();while(this.firstChild){this.removeChild(this.firstChild)}}},function(E,F){o.fn[E]=function(){return this.each(F,arguments)}});function j(E,F){return E[0]&&parseInt(o.curCSS(E[0],F,true),10)||0}var h="jQuery"+e(),v=0,A={};o.extend({cache:{},data:function(F,E,G){F=F==l?A:F;var H=F[h];if(!H){H=F[h]=++v}if(E&&!o.cache[H]){o.cache[H]={}}if(G!==g){o.cache[H][E]=G}return E?o.cache[H][E]:H},removeData:function(F,E){F=F==l?A:F;var H=F[h];if(E){if(o.cache[H]){delete o.cache[H][E];E="";for(E in o.cache[H]){break}if(!E){o.removeData(F)}}}else{try{delete F[h]}catch(G){if(F.removeAttribute){F.removeAttribute(h)}}delete o.cache[H]}},queue:function(F,E,H){if(F){E=(E||"fx")+"queue";var G=o.data(F,E);if(!G||o.isArray(H)){G=o.data(F,E,o.makeArray(H))}else{if(H){G.push(H)}}}return G},dequeue:function(H,G){var E=o.queue(H,G),F=E.shift();if(!G||G==="fx"){F=E[0]}if(F!==g){F.call(H)}}});o.fn.extend({data:function(E,G){var H=E.split(".");H[1]=H[1]?"."+H[1]:"";if(G===g){var F=this.triggerHandler("getData"+H[1]+"!",[H[0]]);if(F===g&&this.length){F=o.data(this[0],E)}return F===g&&H[1]?this.data(H[0]):F}else{return this.trigger("setData"+H[1]+"!",[H[0],G]).each(function(){o.data(this,E,G)})}},removeData:function(E){return this.each(function(){o.removeData(this,E)})},queue:function(E,F){if(typeof E!=="string"){F=E;E="fx"}if(F===g){return o.queue(this[0],E)}return this.each(function(){var G=o.queue(this,E,F);if(E=="fx"&&G.length==1){G[0].call(this)}})},dequeue:function(E){return this.each(function(){o.dequeue(this,E)})}});
-/*
- * Sizzle CSS Selector Engine - v0.9.3
- *  Copyright 2009, The Dojo Foundation
- *  Released under the MIT, BSD, and GPL Licenses.
- *  More information: http://sizzlejs.com/
- */
-(function(){var R=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?/g,L=0,H=Object.prototype.toString;var F=function(Y,U,ab,ac){ab=ab||[];U=U||document;if(U.nodeType!==1&&U.nodeType!==9){return[]}if(!Y||typeof Y!=="string"){return ab}var Z=[],W,af,ai,T,ad,V,X=true;R.lastIndex=0;while((W=R.exec(Y))!==null){Z.push(W[1]);if(W[2]){V=RegExp.rightContext;break}}if(Z.length>1&&M.exec(Y)){if(Z.length===2&&I.relative[Z[0]]){af=J(Z[0]+Z[1],U)}else{af=I.relative[Z[0]]?[U]:F(Z.shift(),U);while(Z.length){Y=Z.shift();if(I.relative[Y]){Y+=Z.shift()}af=J(Y,af)}}}else{var ae=ac?{expr:Z.pop(),set:E(ac)}:F.find(Z.pop(),Z.length===1&&U.parentNode?U.parentNode:U,Q(U));af=F.filter(ae.expr,ae.set);if(Z.length>0){ai=E(af)}else{X=false}while(Z.length){var ah=Z.pop(),ag=ah;if(!I.relative[ah]){ah=""}else{ag=Z.pop()}if(ag==null){ag=U}I.relative[ah](ai,ag,Q(U))}}if(!ai){ai=af}if(!ai){throw"Syntax error, unrecognized expression: "+(ah||Y)}if(H.call(ai)==="[object Array]"){if(!X){ab.push.apply(ab,ai)}else{if(U.nodeType===1){for(var aa=0;ai[aa]!=null;aa++){if(ai[aa]&&(ai[aa]===true||ai[aa].nodeType===1&&K(U,ai[aa]))){ab.push(af[aa])}}}else{for(var aa=0;ai[aa]!=null;aa++){if(ai[aa]&&ai[aa].nodeType===1){ab.push(af[aa])}}}}}else{E(ai,ab)}if(V){F(V,U,ab,ac);if(G){hasDuplicate=false;ab.sort(G);if(hasDuplicate){for(var aa=1;aa<ab.length;aa++){if(ab[aa]===ab[aa-1]){ab.splice(aa--,1)}}}}}return ab};F.matches=function(T,U){return F(T,null,null,U)};F.find=function(aa,T,ab){var Z,X;if(!aa){return[]}for(var W=0,V=I.order.length;W<V;W++){var Y=I.order[W],X;if((X=I.match[Y].exec(aa))){var U=RegExp.leftContext;if(U.substr(U.length-1)!=="\\"){X[1]=(X[1]||"").replace(/\\/g,"");Z=I.find[Y](X,T,ab);if(Z!=null){aa=aa.replace(I.match[Y],"");break}}}}if(!Z){Z=T.getElementsByTagName("*")}return{set:Z,expr:aa}};F.filter=function(ad,ac,ag,W){var V=ad,ai=[],aa=ac,Y,T,Z=ac&&ac[0]&&Q(ac[0]);while(ad&&ac.length){for(var ab in I.filter){if((Y=I.match[ab].exec(ad))!=null){var U=I.filter[ab],ah,af;T=false;if(aa==ai){ai=[]}if(I.preFilter[ab]){Y=I.preFilter[ab](Y,aa,ag,ai,W,Z);if(!Y){T=ah=true}else{if(Y===true){continue}}}if(Y){for(var X=0;(af=aa[X])!=null;X++){if(af){ah=U(af,Y,X,aa);var ae=W^!!ah;if(ag&&ah!=null){if(ae){T=true}else{aa[X]=false}}else{if(ae){ai.push(af);T=true}}}}}if(ah!==g){if(!ag){aa=ai}ad=ad.replace(I.match[ab],"");if(!T){return[]}break}}}if(ad==V){if(T==null){throw"Syntax error, unrecognized expression: "+ad}else{break}}V=ad}return aa};var I=F.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF_-]|\\.)+)/,CLASS:/\.((?:[\w\u00c0-\uFFFF_-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF_-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF_-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*_-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF_-]|\\.)+)(?:\((['"]*)((?:\([^\)]+\)|[^\2\(\)]*)+)\2\))?/},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(T){return T.getAttribute("href")}},relative:{"+":function(aa,T,Z){var X=typeof T==="string",ab=X&&!/\W/.test(T),Y=X&&!ab;if(ab&&!Z){T=T.toUpperCase()}for(var W=0,V=aa.length,U;W<V;W++){if((U=aa[W])){while((U=U.previousSibling)&&U.nodeType!==1){}aa[W]=Y||U&&U.nodeName===T?U||false:U===T}}if(Y){F.filter(T,aa,true)}},">":function(Z,U,aa){var X=typeof U==="string";if(X&&!/\W/.test(U)){U=aa?U:U.toUpperCase();for(var V=0,T=Z.length;V<T;V++){var 
Y=Z[V];if(Y){var W=Y.parentNode;Z[V]=W.nodeName===U?W:false}}}else{for(var V=0,T=Z.length;V<T;V++){var Y=Z[V];if(Y){Z[V]=X?Y.parentNode:Y.parentNode===U}}if(X){F.filter(U,Z,true)}}},"":function(W,U,Y){var V=L++,T=S;if(!U.match(/\W/)){var X=U=Y?U:U.toUpperCase();T=P}T("parentNode",U,V,W,X,Y)},"~":function(W,U,Y){var V=L++,T=S;if(typeof U==="string"&&!U.match(/\W/)){var X=U=Y?U:U.toUpperCase();T=P}T("previousSibling",U,V,W,X,Y)}},find:{ID:function(U,V,W){if(typeof V.getElementById!=="undefined"&&!W){var T=V.getElementById(U[1]);return T?[T]:[]}},NAME:function(V,Y,Z){if(typeof Y.getElementsByName!=="undefined"){var U=[],X=Y.getElementsByName(V[1]);for(var W=0,T=X.length;W<T;W++){if(X[W].getAttribute("name")===V[1]){U.push(X[W])}}return U.length===0?null:U}},TAG:function(T,U){return U.getElementsByTagName(T[1])}},preFilter:{CLASS:function(W,U,V,T,Z,aa){W=" "+W[1].replace(/\\/g,"")+" ";if(aa){return W}for(var X=0,Y;(Y=U[X])!=null;X++){if(Y){if(Z^(Y.className&&(" "+Y.className+" ").indexOf(W)>=0)){if(!V){T.push(Y)}}else{if(V){U[X]=false}}}}return false},ID:function(T){return T[1].replace(/\\/g,"")},TAG:function(U,T){for(var V=0;T[V]===false;V++){}return T[V]&&Q(T[V])?U[1]:U[1].toUpperCase()},CHILD:function(T){if(T[1]=="nth"){var U=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(T[2]=="even"&&"2n"||T[2]=="odd"&&"2n+1"||!/\D/.test(T[2])&&"0n+"+T[2]||T[2]);T[2]=(U[1]+(U[2]||1))-0;T[3]=U[3]-0}T[0]=L++;return T},ATTR:function(X,U,V,T,Y,Z){var W=X[1].replace(/\\/g,"");if(!Z&&I.attrMap[W]){X[1]=I.attrMap[W]}if(X[2]==="~="){X[4]=" "+X[4]+" "}return X},PSEUDO:function(X,U,V,T,Y){if(X[1]==="not"){if(X[3].match(R).length>1||/^\w/.test(X[3])){X[3]=F(X[3],null,null,U)}else{var W=F.filter(X[3],U,V,true^Y);if(!V){T.push.apply(T,W)}return false}}else{if(I.match.POS.test(X[0])||I.match.CHILD.test(X[0])){return true}}return X},POS:function(T){T.unshift(true);return T}},filters:{enabled:function(T){return T.disabled===false&&T.type!=="hidden"},disabled:function(T){return T.disabled===true},checked:function(T){return T.checked===true},selected:function(T){T.parentNode.selectedIndex;return T.selected===true},parent:function(T){return !!T.firstChild},empty:function(T){return !T.firstChild},has:function(V,U,T){return !!F(T[3],V).length},header:function(T){return/h\d/i.test(T.nodeName)},text:function(T){return"text"===T.type},radio:function(T){return"radio"===T.type},checkbox:function(T){return"checkbox"===T.type},file:function(T){return"file"===T.type},password:function(T){return"password"===T.type},submit:function(T){return"submit"===T.type},image:function(T){return"image"===T.type},reset:function(T){return"reset"===T.type},button:function(T){return"button"===T.type||T.nodeName.toUpperCase()==="BUTTON"},input:function(T){return/input|select|textarea|button/i.test(T.nodeName)}},setFilters:{first:function(U,T){return T===0},last:function(V,U,T,W){return U===W.length-1},even:function(U,T){return T%2===0},odd:function(U,T){return T%2===1},lt:function(V,U,T){return U<T[3]-0},gt:function(V,U,T){return U>T[3]-0},nth:function(V,U,T){return T[3]-0==U},eq:function(V,U,T){return T[3]-0==U}},filter:{PSEUDO:function(Z,V,W,aa){var U=V[1],X=I.filters[U];if(X){return X(Z,W,V,aa)}else{if(U==="contains"){return(Z.textContent||Z.innerText||"").indexOf(V[3])>=0}else{if(U==="not"){var Y=V[3];for(var W=0,T=Y.length;W<T;W++){if(Y[W]===Z){return false}}return true}}}},CHILD:function(T,W){var Z=W[1],U=T;switch(Z){case"only":case"first":while(U=U.previousSibling){if(U.nodeType===1){return false}}if(Z=="first"){return 
true}U=T;case"last":while(U=U.nextSibling){if(U.nodeType===1){return false}}return true;case"nth":var V=W[2],ac=W[3];if(V==1&&ac==0){return true}var Y=W[0],ab=T.parentNode;if(ab&&(ab.sizcache!==Y||!T.nodeIndex)){var X=0;for(U=ab.firstChild;U;U=U.nextSibling){if(U.nodeType===1){U.nodeIndex=++X}}ab.sizcache=Y}var aa=T.nodeIndex-ac;if(V==0){return aa==0}else{return(aa%V==0&&aa/V>=0)}}},ID:function(U,T){return U.nodeType===1&&U.getAttribute("id")===T},TAG:function(U,T){return(T==="*"&&U.nodeType===1)||U.nodeName===T},CLASS:function(U,T){return(" "+(U.className||U.getAttribute("class"))+" ").indexOf(T)>-1},ATTR:function(Y,W){var V=W[1],T=I.attrHandle[V]?I.attrHandle[V](Y):Y[V]!=null?Y[V]:Y.getAttribute(V),Z=T+"",X=W[2],U=W[4];return T==null?X==="!=":X==="="?Z===U:X==="*="?Z.indexOf(U)>=0:X==="~="?(" "+Z+" ").indexOf(U)>=0:!U?Z&&T!==false:X==="!="?Z!=U:X==="^="?Z.indexOf(U)===0:X==="$="?Z.substr(Z.length-U.length)===U:X==="|="?Z===U||Z.substr(0,U.length+1)===U+"-":false},POS:function(X,U,V,Y){var T=U[2],W=I.setFilters[T];if(W){return W(X,V,U,Y)}}}};var M=I.match.POS;for(var O in I.match){I.match[O]=RegExp(I.match[O].source+/(?![^\[]*\])(?![^\(]*\))/.source)}var E=function(U,T){U=Array.prototype.slice.call(U);if(T){T.push.apply(T,U);return T}return U};try{Array.prototype.slice.call(document.documentElement.childNodes)}catch(N){E=function(X,W){var U=W||[];if(H.call(X)==="[object Array]"){Array.prototype.push.apply(U,X)}else{if(typeof X.length==="number"){for(var V=0,T=X.length;V<T;V++){U.push(X[V])}}else{for(var V=0;X[V];V++){U.push(X[V])}}}return U}}var G;if(document.documentElement.compareDocumentPosition){G=function(U,T){var V=U.compareDocumentPosition(T)&4?-1:U===T?0:1;if(V===0){hasDuplicate=true}return V}}else{if("sourceIndex" in document.documentElement){G=function(U,T){var V=U.sourceIndex-T.sourceIndex;if(V===0){hasDuplicate=true}return V}}else{if(document.createRange){G=function(W,U){var V=W.ownerDocument.createRange(),T=U.ownerDocument.createRange();V.selectNode(W);V.collapse(true);T.selectNode(U);T.collapse(true);var X=V.compareBoundaryPoints(Range.START_TO_END,T);if(X===0){hasDuplicate=true}return X}}}}(function(){var U=document.createElement("form"),V="script"+(new Date).getTime();U.innerHTML="<input name='"+V+"'/>";var T=document.documentElement;T.insertBefore(U,T.firstChild);if(!!document.getElementById(V)){I.find.ID=function(X,Y,Z){if(typeof Y.getElementById!=="undefined"&&!Z){var W=Y.getElementById(X[1]);return W?W.id===X[1]||typeof W.getAttributeNode!=="undefined"&&W.getAttributeNode("id").nodeValue===X[1]?[W]:g:[]}};I.filter.ID=function(Y,W){var X=typeof Y.getAttributeNode!=="undefined"&&Y.getAttributeNode("id");return Y.nodeType===1&&X&&X.nodeValue===W}}T.removeChild(U)})();(function(){var T=document.createElement("div");T.appendChild(document.createComment(""));if(T.getElementsByTagName("*").length>0){I.find.TAG=function(U,Y){var X=Y.getElementsByTagName(U[1]);if(U[1]==="*"){var W=[];for(var V=0;X[V];V++){if(X[V].nodeType===1){W.push(X[V])}}X=W}return X}}T.innerHTML="<a href='#'></a>";if(T.firstChild&&typeof T.firstChild.getAttribute!=="undefined"&&T.firstChild.getAttribute("href")!=="#"){I.attrHandle.href=function(U){return U.getAttribute("href",2)}}})();if(document.querySelectorAll){(function(){var T=F,U=document.createElement("div");U.innerHTML="<p class='TEST'></p>";if(U.querySelectorAll&&U.querySelectorAll(".TEST").length===0){return}F=function(Y,X,V,W){X=X||document;if(!W&&X.nodeType===9&&!Q(X)){try{return E(X.querySelectorAll(Y),V)}catch(Z){}}return 
T(Y,X,V,W)};F.find=T.find;F.filter=T.filter;F.selectors=T.selectors;F.matches=T.matches})()}if(document.getElementsByClassName&&document.documentElement.getElementsByClassName){(function(){var T=document.createElement("div");T.innerHTML="<div class='test e'></div><div class='test'></div>";if(T.getElementsByClassName("e").length===0){return}T.lastChild.className="e";if(T.getElementsByClassName("e").length===1){return}I.order.splice(1,0,"CLASS");I.find.CLASS=function(U,V,W){if(typeof V.getElementsByClassName!=="undefined"&&!W){return V.getElementsByClassName(U[1])}}})()}function P(U,Z,Y,ad,aa,ac){var ab=U=="previousSibling"&&!ac;for(var W=0,V=ad.length;W<V;W++){var T=ad[W];if(T){if(ab&&T.nodeType===1){T.sizcache=Y;T.sizset=W}T=T[U];var X=false;while(T){if(T.sizcache===Y){X=ad[T.sizset];break}if(T.nodeType===1&&!ac){T.sizcache=Y;T.sizset=W}if(T.nodeName===Z){X=T;break}T=T[U]}ad[W]=X}}}function S(U,Z,Y,ad,aa,ac){var ab=U=="previousSibling"&&!ac;for(var W=0,V=ad.length;W<V;W++){var T=ad[W];if(T){if(ab&&T.nodeType===1){T.sizcache=Y;T.sizset=W}T=T[U];var X=false;while(T){if(T.sizcache===Y){X=ad[T.sizset];break}if(T.nodeType===1){if(!ac){T.sizcache=Y;T.sizset=W}if(typeof Z!=="string"){if(T===Z){X=true;break}}else{if(F.filter(Z,[T]).length>0){X=T;break}}}T=T[U]}ad[W]=X}}}var K=document.compareDocumentPosition?function(U,T){return U.compareDocumentPosition(T)&16}:function(U,T){return U!==T&&(U.contains?U.contains(T):true)};var Q=function(T){return T.nodeType===9&&T.documentElement.nodeName!=="HTML"||!!T.ownerDocument&&Q(T.ownerDocument)};var J=function(T,aa){var W=[],X="",Y,V=aa.nodeType?[aa]:aa;while((Y=I.match.PSEUDO.exec(T))){X+=Y[0];T=T.replace(I.match.PSEUDO,"")}T=I.relative[T]?T+"*":T;for(var Z=0,U=V.length;Z<U;Z++){F(T,V[Z],W)}return F.filter(X,W)};o.find=F;o.filter=F.filter;o.expr=F.selectors;o.expr[":"]=o.expr.filters;F.selectors.filters.hidden=function(T){return T.offsetWidth===0||T.offsetHeight===0};F.selectors.filters.visible=function(T){return T.offsetWidth>0||T.offsetHeight>0};F.selectors.filters.animated=function(T){return o.grep(o.timers,function(U){return T===U.elem}).length};o.multiFilter=function(V,T,U){if(U){V=":not("+V+")"}return F.matches(V,T)};o.dir=function(V,U){var T=[],W=V[U];while(W&&W!=document){if(W.nodeType==1){T.push(W)}W=W[U]}return T};o.nth=function(X,T,V,W){T=T||1;var U=0;for(;X;X=X[V]){if(X.nodeType==1&&++U==T){break}}return X};o.sibling=function(V,U){var T=[];for(;V;V=V.nextSibling){if(V.nodeType==1&&V!=U){T.push(V)}}return T};return;l.Sizzle=F})();o.event={add:function(I,F,H,K){if(I.nodeType==3||I.nodeType==8){return}if(I.setInterval&&I!=l){I=l}if(!H.guid){H.guid=this.guid++}if(K!==g){var G=H;H=this.proxy(G);H.data=K}var E=o.data(I,"events")||o.data(I,"events",{}),J=o.data(I,"handle")||o.data(I,"handle",function(){return typeof o!=="undefined"&&!o.event.triggered?o.event.handle.apply(arguments.callee.elem,arguments):g});J.elem=I;o.each(F.split(/\s+/),function(M,N){var O=N.split(".");N=O.shift();H.type=O.slice().sort().join(".");var L=E[N];if(o.event.specialAll[N]){o.event.specialAll[N].setup.call(I,K,O)}if(!L){L=E[N]={};if(!o.event.special[N]||o.event.special[N].setup.call(I,K,O)===false){if(I.addEventListener){I.addEventListener(N,J,false)}else{if(I.attachEvent){I.attachEvent("on"+N,J)}}}}L[H.guid]=H;o.event.global[N]=true});I=null},guid:1,global:{},remove:function(K,H,J){if(K.nodeType==3||K.nodeType==8){return}var G=o.data(K,"events"),F,E;if(G){if(H===g||(typeof H==="string"&&H.charAt(0)==".")){for(var I in 
G){this.remove(K,I+(H||""))}}else{if(H.type){J=H.handler;H=H.type}o.each(H.split(/\s+/),function(M,O){var Q=O.split(".");O=Q.shift();var N=RegExp("(^|\\.)"+Q.slice().sort().join(".*\\.")+"(\\.|$)");if(G[O]){if(J){delete G[O][J.guid]}else{for(var P in G[O]){if(N.test(G[O][P].type)){delete G[O][P]}}}if(o.event.specialAll[O]){o.event.specialAll[O].teardown.call(K,Q)}for(F in G[O]){break}if(!F){if(!o.event.special[O]||o.event.special[O].teardown.call(K,Q)===false){if(K.removeEventListener){K.removeEventListener(O,o.data(K,"handle"),false)}else{if(K.detachEvent){K.detachEvent("on"+O,o.data(K,"handle"))}}}F=null;delete G[O]}}})}for(F in G){break}if(!F){var L=o.data(K,"handle");if(L){L.elem=null}o.removeData(K,"events");o.removeData(K,"handle")}}},trigger:function(I,K,H,E){var G=I.type||I;if(!E){I=typeof I==="object"?I[h]?I:o.extend(o.Event(G),I):o.Event(G);if(G.indexOf("!")>=0){I.type=G=G.slice(0,-1);I.exclusive=true}if(!H){I.stopPropagation();if(this.global[G]){o.each(o.cache,function(){if(this.events&&this.events[G]){o.event.trigger(I,K,this.handle.elem)}})}}if(!H||H.nodeType==3||H.nodeType==8){return g}I.result=g;I.target=H;K=o.makeArray(K);K.unshift(I)}I.currentTarget=H;var J=o.data(H,"handle");if(J){J.apply(H,K)}if((!H[G]||(o.nodeName(H,"a")&&G=="click"))&&H["on"+G]&&H["on"+G].apply(H,K)===false){I.result=false}if(!E&&H[G]&&!I.isDefaultPrevented()&&!(o.nodeName(H,"a")&&G=="click")){this.triggered=true;try{H[G]()}catch(L){}}this.triggered=false;if(!I.isPropagationStopped()){var F=H.parentNode||H.ownerDocument;if(F){o.event.trigger(I,K,F,true)}}},handle:function(K){var J,E;K=arguments[0]=o.event.fix(K||l.event);K.currentTarget=this;var L=K.type.split(".");K.type=L.shift();J=!L.length&&!K.exclusive;var I=RegExp("(^|\\.)"+L.slice().sort().join(".*\\.")+"(\\.|$)");E=(o.data(this,"events")||{})[K.type];for(var G in E){var H=E[G];if(J||I.test(H.type)){K.handler=H;K.data=H.data;var F=H.apply(this,arguments);if(F!==g){K.result=F;if(F===false){K.preventDefault();K.stopPropagation()}}if(K.isImmediatePropagationStopped()){break}}}},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),fix:function(H){if(H[h]){return H}var F=H;H=o.Event(F);for(var G=this.props.length,J;G;){J=this.props[--G];H[J]=F[J]}if(!H.target){H.target=H.srcElement||document}if(H.target.nodeType==3){H.target=H.target.parentNode}if(!H.relatedTarget&&H.fromElement){H.relatedTarget=H.fromElement==H.target?H.toElement:H.fromElement}if(H.pageX==null&&H.clientX!=null){var I=document.documentElement,E=document.body;H.pageX=H.clientX+(I&&I.scrollLeft||E&&E.scrollLeft||0)-(I.clientLeft||0);H.pageY=H.clientY+(I&&I.scrollTop||E&&E.scrollTop||0)-(I.clientTop||0)}if(!H.which&&((H.charCode||H.charCode===0)?H.charCode:H.keyCode)){H.which=H.charCode||H.keyCode}if(!H.metaKey&&H.ctrlKey){H.metaKey=H.ctrlKey}if(!H.which&&H.button){H.which=(H.button&1?1:(H.button&2?3:(H.button&4?2:0)))}return H},proxy:function(F,E){E=E||function(){return F.apply(this,arguments)};E.guid=F.guid=F.guid||E.guid||this.guid++;return E},special:{ready:{setup:B,teardown:function(){}}},specialAll:{live:{setup:function(E,F){o.event.add(this,F[0],c)},teardown:function(G){if(G.length){var 
E=0,F=RegExp("(^|\\.)"+G[0]+"(\\.|$)");o.each((o.data(this,"events").live||{}),function(){if(F.test(this.type)){E++}});if(E<1){o.event.remove(this,G[0],c)}}}}}};o.Event=function(E){if(!this.preventDefault){return new o.Event(E)}if(E&&E.type){this.originalEvent=E;this.type=E.type}else{this.type=E}this.timeStamp=e();this[h]=true};function k(){return false}function u(){return true}o.Event.prototype={preventDefault:function(){this.isDefaultPrevented=u;var E=this.originalEvent;if(!E){return}if(E.preventDefault){E.preventDefault()}E.returnValue=false},stopPropagation:function(){this.isPropagationStopped=u;var E=this.originalEvent;if(!E){return}if(E.stopPropagation){E.stopPropagation()}E.cancelBubble=true},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=u;this.stopPropagation()},isDefaultPrevented:k,isPropagationStopped:k,isImmediatePropagationStopped:k};var a=function(F){var E=F.relatedTarget;while(E&&E!=this){try{E=E.parentNode}catch(G){E=this}}if(E!=this){F.type=F.data;o.event.handle.apply(this,arguments)}};o.each({mouseover:"mouseenter",mouseout:"mouseleave"},function(F,E){o.event.special[E]={setup:function(){o.event.add(this,F,a,E)},teardown:function(){o.event.remove(this,F,a)}}});o.fn.extend({bind:function(F,G,E){return F=="unload"?this.one(F,G,E):this.each(function(){o.event.add(this,F,E||G,E&&G)})},one:function(G,H,F){var E=o.event.proxy(F||H,function(I){o(this).unbind(I,E);return(F||H).apply(this,arguments)});return this.each(function(){o.event.add(this,G,E,F&&H)})},unbind:function(F,E){return this.each(function(){o.event.remove(this,F,E)})},trigger:function(E,F){return this.each(function(){o.event.trigger(E,F,this)})},triggerHandler:function(E,G){if(this[0]){var F=o.Event(E);F.preventDefault();F.stopPropagation();o.event.trigger(F,G,this[0]);return F.result}},toggle:function(G){var E=arguments,F=1;while(F<E.length){o.event.proxy(G,E[F++])}return this.click(o.event.proxy(G,function(H){this.lastToggle=(this.lastToggle||0)%F;H.preventDefault();return E[this.lastToggle++].apply(this,arguments)||false}))},hover:function(E,F){return this.mouseenter(E).mouseleave(F)},ready:function(E){B();if(o.isReady){E.call(document,o)}else{o.readyList.push(E)}return this},live:function(G,F){var E=o.event.proxy(F);E.guid+=this.selector+G;o(document).bind(i(G,this.selector),this.selector,E);return this},die:function(F,E){o(document).unbind(i(F,this.selector),E?{guid:E.guid+this.selector+F}:null);return this}});function c(H){var E=RegExp("(^|\\.)"+H.type+"(\\.|$)"),G=true,F=[];o.each(o.data(this,"events").live||[],function(I,J){if(E.test(J.type)){var K=o(H.target).closest(J.data)[0];if(K){F.push({elem:K,fn:J})}}});F.sort(function(J,I){return o.data(J.elem,"closest")-o.data(I.elem,"closest")});o.each(F,function(){if(this.fn.call(this.elem,H,this.fn.data)===false){return(G=false)}});return G}function i(F,E){return["live",F,E.replace(/\./g,"`").replace(/ /g,"|")].join(".")}o.extend({isReady:false,readyList:[],ready:function(){if(!o.isReady){o.isReady=true;if(o.readyList){o.each(o.readyList,function(){this.call(document,o)});o.readyList=null}o(document).triggerHandler("ready")}}});var x=false;function 
B(){if(x){return}x=true;if(document.addEventListener){document.addEventListener("DOMContentLoaded",function(){document.removeEventListener("DOMContentLoaded",arguments.callee,false);o.ready()},false)}else{if(document.attachEvent){document.attachEvent("onreadystatechange",function(){if(document.readyState==="complete"){document.detachEvent("onreadystatechange",arguments.callee);o.ready()}});if(document.documentElement.doScroll&&l==l.top){(function(){if(o.isReady){return}try{document.documentElement.doScroll("left")}catch(E){setTimeout(arguments.callee,0);return}o.ready()})()}}}o.event.add(l,"load",o.ready)}o.each(("blur,focus,load,resize,scroll,unload,click,dblclick,mousedown,mouseup,mousemove,mouseover,mouseout,mouseenter,mouseleave,change,select,submit,keydown,keypress,keyup,error").split(","),function(F,E){o.fn[E]=function(G){return G?this.bind(E,G):this.trigger(E)}});o(l).bind("unload",function(){for(var E in o.cache){if(E!=1&&o.cache[E].handle){o.event.remove(o.cache[E].handle.elem)}}});(function(){o.support={};var F=document.documentElement,G=document.createElement("script"),K=document.createElement("div"),J="script"+(new Date).getTime();K.style.display="none";K.innerHTML='   <link/><table></table><a href="/a" style="color:red;float:left;opacity:.5;">a</a><select><option>text</option></select><object><param/></object>';var H=K.getElementsByTagName("*"),E=K.getElementsByTagName("a")[0];if(!H||!H.length||!E){return}o.support={leadingWhitespace:K.firstChild.nodeType==3,tbody:!K.getElementsByTagName("tbody").length,objectAll:!!K.getElementsByTagName("object")[0].getElementsByTagName("*").length,htmlSerialize:!!K.getElementsByTagName("link").length,style:/red/.test(E.getAttribute("style")),hrefNormalized:E.getAttribute("href")==="/a",opacity:E.style.opacity==="0.5",cssFloat:!!E.style.cssFloat,scriptEval:false,noCloneEvent:true,boxModel:null};G.type="text/javascript";try{G.appendChild(document.createTextNode("window."+J+"=1;"))}catch(I){}F.insertBefore(G,F.firstChild);if(l[J]){o.support.scriptEval=true;delete l[J]}F.removeChild(G);if(K.attachEvent&&K.fireEvent){K.attachEvent("onclick",function(){o.support.noCloneEvent=false;K.detachEvent("onclick",arguments.callee)});K.cloneNode(true).fireEvent("onclick")}o(function(){var L=document.createElement("div");L.style.width=L.style.paddingLeft="1px";document.body.appendChild(L);o.boxModel=o.support.boxModel=L.offsetWidth===2;document.body.removeChild(L).style.display="none"})})();var w=o.support.cssFloat?"cssFloat":"styleFloat";o.props={"for":"htmlFor","class":"className","float":w,cssFloat:w,styleFloat:w,readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",tabindex:"tabIndex"};o.fn.extend({_load:o.fn.load,load:function(G,J,K){if(typeof G!=="string"){return this._load(G)}var I=G.indexOf(" ");if(I>=0){var E=G.slice(I,G.length);G=G.slice(0,I)}var H="GET";if(J){if(o.isFunction(J)){K=J;J=null}else{if(typeof J==="object"){J=o.param(J);H="POST"}}}var F=this;o.ajax({url:G,type:H,dataType:"html",data:J,complete:function(M,L){if(L=="success"||L=="notmodified"){F.html(E?o("<div/>").append(M.responseText.replace(/<script(.|\s)*?\/script>/g,"")).find(E):M.responseText)}if(K){F.each(K,[M.responseText,L,M])}}});return this},serialize:function(){return o.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?o.makeArray(this.elements):this}).filter(function(){return 
this.name&&!this.disabled&&(this.checked||/select|textarea/i.test(this.nodeName)||/text|hidden|password|search/i.test(this.type))}).map(function(E,F){var G=o(this).val();return G==null?null:o.isArray(G)?o.map(G,function(I,H){return{name:F.name,value:I}}):{name:F.name,value:G}}).get()}});o.each("ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","),function(E,F){o.fn[F]=function(G){return this.bind(F,G)}});var r=e();o.extend({get:function(E,G,H,F){if(o.isFunction(G)){H=G;G=null}return o.ajax({type:"GET",url:E,data:G,success:H,dataType:F})},getScript:function(E,F){return o.get(E,null,F,"script")},getJSON:function(E,F,G){return o.get(E,F,G,"json")},post:function(E,G,H,F){if(o.isFunction(G)){H=G;G={}}return o.ajax({type:"POST",url:E,data:G,success:H,dataType:F})},ajaxSetup:function(E){o.extend(o.ajaxSettings,E)},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:function(){return l.ActiveXObject?new ActiveXObject("Microsoft.XMLHTTP"):new XMLHttpRequest()},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},ajax:function(M){M=o.extend(true,M,o.extend(true,{},o.ajaxSettings,M));var W,F=/=\?(&|$)/g,R,V,G=M.type.toUpperCase();if(M.data&&M.processData&&typeof M.data!=="string"){M.data=o.param(M.data)}if(M.dataType=="jsonp"){if(G=="GET"){if(!M.url.match(F)){M.url+=(M.url.match(/\?/)?"&":"?")+(M.jsonp||"callback")+"=?"}}else{if(!M.data||!M.data.match(F)){M.data=(M.data?M.data+"&":"")+(M.jsonp||"callback")+"=?"}}M.dataType="json"}if(M.dataType=="json"&&(M.data&&M.data.match(F)||M.url.match(F))){W="jsonp"+r++;if(M.data){M.data=(M.data+"").replace(F,"="+W+"$1")}M.url=M.url.replace(F,"="+W+"$1");M.dataType="script";l[W]=function(X){V=X;I();L();l[W]=g;try{delete l[W]}catch(Y){}if(H){H.removeChild(T)}}}if(M.dataType=="script"&&M.cache==null){M.cache=false}if(M.cache===false&&G=="GET"){var E=e();var U=M.url.replace(/(\?|&)_=.*?(&|$)/,"$1_="+E+"$2");M.url=U+((U==M.url)?(M.url.match(/\?/)?"&":"?")+"_="+E:"")}if(M.data&&G=="GET"){M.url+=(M.url.match(/\?/)?"&":"?")+M.data;M.data=null}if(M.global&&!o.active++){o.event.trigger("ajaxStart")}var Q=/^(\w+:)?\/\/([^\/?#]+)/.exec(M.url);if(M.dataType=="script"&&G=="GET"&&Q&&(Q[1]&&Q[1]!=location.protocol||Q[2]!=location.host)){var H=document.getElementsByTagName("head")[0];var T=document.createElement("script");T.src=M.url;if(M.scriptCharset){T.charset=M.scriptCharset}if(!W){var O=false;T.onload=T.onreadystatechange=function(){if(!O&&(!this.readyState||this.readyState=="loaded"||this.readyState=="complete")){O=true;I();L();T.onload=T.onreadystatechange=null;H.removeChild(T)}}}H.appendChild(T);return g}var K=false;var J=M.xhr();if(M.username){J.open(G,M.url,M.async,M.username,M.password)}else{J.open(G,M.url,M.async)}try{if(M.data){J.setRequestHeader("Content-Type",M.contentType)}if(M.ifModified){J.setRequestHeader("If-Modified-Since",o.lastModified[M.url]||"Thu, 01 Jan 1970 00:00:00 GMT")}J.setRequestHeader("X-Requested-With","XMLHttpRequest");J.setRequestHeader("Accept",M.dataType&&M.accepts[M.dataType]?M.accepts[M.dataType]+", */*":M.accepts._default)}catch(S){}if(M.beforeSend&&M.beforeSend(J,M)===false){if(M.global&&!--o.active){o.event.trigger("ajaxStop")}J.abort();return false}if(M.global){o.event.trigger("ajaxSend",[J,M])}var 
N=function(X){if(J.readyState==0){if(P){clearInterval(P);P=null;if(M.global&&!--o.active){o.event.trigger("ajaxStop")}}}else{if(!K&&J&&(J.readyState==4||X=="timeout")){K=true;if(P){clearInterval(P);P=null}R=X=="timeout"?"timeout":!o.httpSuccess(J)?"error":M.ifModified&&o.httpNotModified(J,M.url)?"notmodified":"success";if(R=="success"){try{V=o.httpData(J,M.dataType,M)}catch(Z){R="parsererror"}}if(R=="success"){var Y;try{Y=J.getResponseHeader("Last-Modified")}catch(Z){}if(M.ifModified&&Y){o.lastModified[M.url]=Y}if(!W){I()}}else{o.handleError(M,J,R)}L();if(X){J.abort()}if(M.async){J=null}}}};if(M.async){var P=setInterval(N,13);if(M.timeout>0){setTimeout(function(){if(J&&!K){N("timeout")}},M.timeout)}}try{J.send(M.data)}catch(S){o.handleError(M,J,null,S)}if(!M.async){N()}function I(){if(M.success){M.success(V,R)}if(M.global){o.event.trigger("ajaxSuccess",[J,M])}}function L(){if(M.complete){M.complete(J,R)}if(M.global){o.event.trigger("ajaxComplete",[J,M])}if(M.global&&!--o.active){o.event.trigger("ajaxStop")}}return J},handleError:function(F,H,E,G){if(F.error){F.error(H,E,G)}if(F.global){o.event.trigger("ajaxError",[H,F,G])}},active:0,httpSuccess:function(F){try{return !F.status&&location.protocol=="file:"||(F.status>=200&&F.status<300)||F.status==304||F.status==1223}catch(E){}return false},httpNotModified:function(G,E){try{var H=G.getResponseHeader("Last-Modified");return G.status==304||H==o.lastModified[E]}catch(F){}return false},httpData:function(J,H,G){var F=J.getResponseHeader("content-type"),E=H=="xml"||!H&&F&&F.indexOf("xml")>=0,I=E?J.responseXML:J.responseText;if(E&&I.documentElement.tagName=="parsererror"){throw"parsererror"}if(G&&G.dataFilter){I=G.dataFilter(I,H)}if(typeof I==="string"){if(H=="script"){o.globalEval(I)}if(H=="json"){I=l["eval"]("("+I+")")}}return I},param:function(E){var G=[];function H(I,J){G[G.length]=encodeURIComponent(I)+"="+encodeURIComponent(J)}if(o.isArray(E)||E.jquery){o.each(E,function(){H(this.name,this.value)})}else{for(var F in E){if(o.isArray(E[F])){o.each(E[F],function(){H(F,this)})}else{H(F,o.isFunction(E[F])?E[F]():E[F])}}}return G.join("&").replace(/%20/g,"+")}});var m={},n,d=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];function t(F,E){var G={};o.each(d.concat.apply([],d.slice(0,E)),function(){G[this]=F});return G}o.fn.extend({show:function(J,L){if(J){return this.animate(t("show",3),J,L)}else{for(var H=0,F=this.length;H<F;H++){var E=o.data(this[H],"olddisplay");this[H].style.display=E||"";if(o.css(this[H],"display")==="none"){var G=this[H].tagName,K;if(m[G]){K=m[G]}else{var I=o("<"+G+" />").appendTo("body");K=I.css("display");if(K==="none"){K="block"}I.remove();m[G]=K}o.data(this[H],"olddisplay",K)}}for(var H=0,F=this.length;H<F;H++){this[H].style.display=o.data(this[H],"olddisplay")||""}return this}},hide:function(H,I){if(H){return this.animate(t("hide",3),H,I)}else{for(var G=0,F=this.length;G<F;G++){var E=o.data(this[G],"olddisplay");if(!E&&E!=="none"){o.data(this[G],"olddisplay",o.css(this[G],"display"))}}for(var G=0,F=this.length;G<F;G++){this[G].style.display="none"}return this}},_toggle:o.fn.toggle,toggle:function(G,F){var E=typeof G==="boolean";return o.isFunction(G)&&o.isFunction(F)?this._toggle.apply(this,arguments):G==null||E?this.each(function(){var H=E?G:o(this).is(":hidden");o(this)[H?"show":"hide"]()}):this.animate(t("toggle",3),G,F)},fadeTo:function(E,G,F){return this.animate({opacity:G},E,F)},animate:function(I,F,H,G){var 
E=o.speed(F,H,G);return this[E.queue===false?"each":"queue"](function(){var K=o.extend({},E),M,L=this.nodeType==1&&o(this).is(":hidden"),J=this;for(M in I){if(I[M]=="hide"&&L||I[M]=="show"&&!L){return K.complete.call(this)}if((M=="height"||M=="width")&&this.style){K.display=o.css(this,"display");K.overflow=this.style.overflow}}if(K.overflow!=null){this.style.overflow="hidden"}K.curAnim=o.extend({},I);o.each(I,function(O,S){var R=new o.fx(J,K,O);if(/toggle|show|hide/.test(S)){R[S=="toggle"?L?"show":"hide":S](I)}else{var Q=S.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),T=R.cur(true)||0;if(Q){var N=parseFloat(Q[2]),P=Q[3]||"px";if(P!="px"){J.style[O]=(N||1)+P;T=((N||1)/R.cur(true))*T;J.style[O]=T+P}if(Q[1]){N=((Q[1]=="-="?-1:1)*N)+T}R.custom(T,N,P)}else{R.custom(T,S,"")}}});return true})},stop:function(F,E){var G=o.timers;if(F){this.queue([])}this.each(function(){for(var H=G.length-1;H>=0;H--){if(G[H].elem==this){if(E){G[H](true)}G.splice(H,1)}}});if(!E){this.dequeue()}return this}});o.each({slideDown:t("show",1),slideUp:t("hide",1),slideToggle:t("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(E,F){o.fn[E]=function(G,H){return this.animate(F,G,H)}});o.extend({speed:function(G,H,F){var E=typeof G==="object"?G:{complete:F||!F&&H||o.isFunction(G)&&G,duration:G,easing:F&&H||H&&!o.isFunction(H)&&H};E.duration=o.fx.off?0:typeof E.duration==="number"?E.duration:o.fx.speeds[E.duration]||o.fx.speeds._default;E.old=E.complete;E.complete=function(){if(E.queue!==false){o(this).dequeue()}if(o.isFunction(E.old)){E.old.call(this)}};return E},easing:{linear:function(G,H,E,F){return E+F*G},swing:function(G,H,E,F){return((-Math.cos(G*Math.PI)/2)+0.5)*F+E}},timers:[],fx:function(F,E,G){this.options=E;this.elem=F;this.prop=G;if(!E.orig){E.orig={}}}});o.fx.prototype={update:function(){if(this.options.step){this.options.step.call(this.elem,this.now,this)}(o.fx.step[this.prop]||o.fx.step._default)(this);if((this.prop=="height"||this.prop=="width")&&this.elem.style){this.elem.style.display="block"}},cur:function(F){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null)){return this.elem[this.prop]}var E=parseFloat(o.css(this.elem,this.prop,F));return E&&E>-10000?E:parseFloat(o.curCSS(this.elem,this.prop))||0},custom:function(I,H,G){this.startTime=e();this.start=I;this.end=H;this.unit=G||this.unit||"px";this.now=this.start;this.pos=this.state=0;var E=this;function F(J){return E.step(J)}F.elem=this.elem;if(F()&&o.timers.push(F)&&!n){n=setInterval(function(){var K=o.timers;for(var J=0;J<K.length;J++){if(!K[J]()){K.splice(J--,1)}}if(!K.length){clearInterval(n);n=g}},13)}},show:function(){this.options.orig[this.prop]=o.attr(this.elem.style,this.prop);this.options.show=true;this.custom(this.prop=="width"||this.prop=="height"?1:0,this.cur());o(this.elem).show()},hide:function(){this.options.orig[this.prop]=o.attr(this.elem.style,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(H){var G=e();if(H||G>=this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;var E=true;for(var F in this.options.curAnim){if(this.options.curAnim[F]!==true){E=false}}if(E){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;this.elem.style.display=this.options.display;if(o.css(this.elem,"display")=="none"){this.elem.style.display="block"}}if(this.options.hide){o(this.elem).hide()}if(this.options.hide||this.options.show){for(var I in 
this.options.curAnim){o.attr(this.elem.style,I,this.options.orig[I])}}this.options.complete.call(this.elem)}return false}else{var J=G-this.startTime;this.state=J/this.options.duration;this.pos=o.easing[this.options.easing||(o.easing.swing?"swing":"linear")](this.state,J,0,1,this.options.duration);this.now=this.start+((this.end-this.start)*this.pos);this.update()}return true}};o.extend(o.fx,{speeds:{slow:600,fast:200,_default:400},step:{opacity:function(E){o.attr(E.elem.style,"opacity",E.now)},_default:function(E){if(E.elem.style&&E.elem.style[E.prop]!=null){E.elem.style[E.prop]=E.now+E.unit}else{E.elem[E.prop]=E.now}}}});if(document.documentElement.getBoundingClientRect){o.fn.offset=function(){if(!this[0]){return{top:0,left:0}}if(this[0]===this[0].ownerDocument.body){return o.offset.bodyOffset(this[0])}var G=this[0].getBoundingClientRect(),J=this[0].ownerDocument,F=J.body,E=J.documentElement,L=E.clientTop||F.clientTop||0,K=E.clientLeft||F.clientLeft||0,I=G.top+(self.pageYOffset||o.boxModel&&E.scrollTop||F.scrollTop)-L,H=G.left+(self.pageXOffset||o.boxModel&&E.scrollLeft||F.scrollLeft)-K;return{top:I,left:H}}}else{o.fn.offset=function(){if(!this[0]){return{top:0,left:0}}if(this[0]===this[0].ownerDocument.body){return o.offset.bodyOffset(this[0])}o.offset.initialized||o.offset.initialize();var J=this[0],G=J.offsetParent,F=J,O=J.ownerDocument,M,H=O.documentElement,K=O.body,L=O.defaultView,E=L.getComputedStyle(J,null),N=J.offsetTop,I=J.offsetLeft;while((J=J.parentNode)&&J!==K&&J!==H){M=L.getComputedStyle(J,null);N-=J.scrollTop,I-=J.scrollLeft;if(J===G){N+=J.offsetTop,I+=J.offsetLeft;if(o.offset.doesNotAddBorder&&!(o.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(J.tagName))){N+=parseInt(M.borderTopWidth,10)||0,I+=parseInt(M.borderLeftWidth,10)||0}F=G,G=J.offsetParent}if(o.offset.subtractsBorderForOverflowNotVisible&&M.overflow!=="visible"){N+=parseInt(M.borderTopWidth,10)||0,I+=parseInt(M.borderLeftWidth,10)||0}E=M}if(E.position==="relative"||E.position==="static"){N+=K.offsetTop,I+=K.offsetLeft}if(E.position==="fixed"){N+=Math.max(H.scrollTop,K.scrollTop),I+=Math.max(H.scrollLeft,K.scrollLeft)}return{top:N,left:I}}}o.offset={initialize:function(){if(this.initialized){return}var L=document.body,F=document.createElement("div"),H,G,N,I,M,E,J=L.style.marginTop,K='<div style="position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;"><div></div></div><table style="position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;" cellpadding="0" cellspacing="0"><tr><td></td></tr></table>';M={position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"};for(E in M){F.style[E]=M[E]}F.innerHTML=K;L.insertBefore(F,L.firstChild);H=F.firstChild,G=H.firstChild,I=H.nextSibling.firstChild.firstChild;this.doesNotAddBorder=(G.offsetTop!==5);this.doesAddBorderForTableAndCells=(I.offsetTop===5);H.style.overflow="hidden",H.style.position="relative";this.subtractsBorderForOverflowNotVisible=(G.offsetTop===-5);L.style.marginTop="1px";this.doesNotIncludeMarginInBodyOffset=(L.offsetTop===0);L.style.marginTop=J;L.removeChild(F);this.initialized=true},bodyOffset:function(E){o.offset.initialized||o.offset.initialize();var G=E.offsetTop,F=E.offsetLeft;if(o.offset.doesNotIncludeMarginInBodyOffset){G+=parseInt(o.curCSS(E,"marginTop",true),10)||0,F+=parseInt(o.curCSS(E,"marginLeft",true),10)||0}return{top:G,left:F}}};o.fn.extend({position:function(){var I=0,H=0,F;if(this[0]){var 
G=this.offsetParent(),J=this.offset(),E=/^body|html$/i.test(G[0].tagName)?{top:0,left:0}:G.offset();J.top-=j(this,"marginTop");J.left-=j(this,"marginLeft");E.top+=j(G,"borderTopWidth");E.left+=j(G,"borderLeftWidth");F={top:J.top-E.top,left:J.left-E.left}}return F},offsetParent:function(){var E=this[0].offsetParent||document.body;while(E&&(!/^body|html$/i.test(E.tagName)&&o.css(E,"position")=="static")){E=E.offsetParent}return o(E)}});o.each(["Left","Top"],function(F,E){var G="scroll"+E;o.fn[G]=function(H){if(!this[0]){return null}return H!==g?this.each(function(){this==l||this==document?l.scrollTo(!F?H:o(l).scrollLeft(),F?H:o(l).scrollTop()):this[G]=H}):this[0]==l||this[0]==document?self[F?"pageYOffset":"pageXOffset"]||o.boxModel&&document.documentElement[G]||document.body[G]:this[0][G]}});o.each(["Height","Width"],function(I,G){var E=I?"Left":"Top",H=I?"Right":"Bottom",F=G.toLowerCase();o.fn["inner"+G]=function(){return this[0]?o.css(this[0],F,false,"padding"):null};o.fn["outer"+G]=function(K){return this[0]?o.css(this[0],F,false,K?"margin":"border"):null};var J=G.toLowerCase();o.fn[J]=function(K){return this[0]==l?document.compatMode=="CSS1Compat"&&document.documentElement["client"+G]||document.body["client"+G]:this[0]==document?Math.max(document.documentElement["client"+G],document.body["scroll"+G],document.documentElement["scroll"+G],document.body["offset"+G],document.documentElement["offset"+G]):K===g?(this.length?o.css(this[0],J):null):this.css(J,typeof K==="string"?K:K+"px")}})})();
\ No newline at end of file
+(function(E,B){function ka(a,b,d){if(d===B&&a.nodeType===1){d=a.getAttribute("data-"+b);if(typeof d==="string"){try{d=d==="true"?true:d==="false"?false:d==="null"?null:!c.isNaN(d)?parseFloat(d):Ja.test(d)?c.parseJSON(d):d}catch(e){}c.data(a,b,d)}else d=B}return d}function U(){return false}function ca(){return true}function la(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function Ka(a){var b,d,e,f,h,l,k,o,x,r,A,C=[];f=[];h=c.data(this,this.nodeType?"events":"__events__");if(typeof h==="function")h=
+h.events;if(!(a.liveFired===this||!h||!h.live||a.button&&a.type==="click")){if(a.namespace)A=RegExp("(^|\\.)"+a.namespace.split(".").join("\\.(?:.*\\.)?")+"(\\.|$)");a.liveFired=this;var J=h.live.slice(0);for(k=0;k<J.length;k++){h=J[k];h.origType.replace(X,"")===a.type?f.push(h.selector):J.splice(k--,1)}f=c(a.target).closest(f,a.currentTarget);o=0;for(x=f.length;o<x;o++){r=f[o];for(k=0;k<J.length;k++){h=J[k];if(r.selector===h.selector&&(!A||A.test(h.namespace))){l=r.elem;e=null;if(h.preType==="mouseenter"||
+h.preType==="mouseleave"){a.type=h.preType;e=c(a.relatedTarget).closest(h.selector)[0]}if(!e||e!==l)C.push({elem:l,handleObj:h,level:r.level})}}}o=0;for(x=C.length;o<x;o++){f=C[o];if(d&&f.level>d)break;a.currentTarget=f.elem;a.data=f.handleObj.data;a.handleObj=f.handleObj;A=f.handleObj.origHandler.apply(f.elem,arguments);if(A===false||a.isPropagationStopped()){d=f.level;if(A===false)b=false;if(a.isImmediatePropagationStopped())break}}return b}}function Y(a,b){return(a&&a!=="*"?a+".":"")+b.replace(La,
+"`").replace(Ma,"&")}function ma(a,b,d){if(c.isFunction(b))return c.grep(a,function(f,h){return!!b.call(f,h,f)===d});else if(b.nodeType)return c.grep(a,function(f){return f===b===d});else if(typeof b==="string"){var e=c.grep(a,function(f){return f.nodeType===1});if(Na.test(b))return c.filter(b,e,!d);else b=c.filter(b,e)}return c.grep(a,function(f){return c.inArray(f,b)>=0===d})}function na(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var e=c.data(a[d++]),f=c.data(this,
+e);if(e=e&&e.events){delete f.handle;f.events={};for(var h in e)for(var l in e[h])c.event.add(this,h,e[h][l],e[h][l].data)}}})}function Oa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function oa(a,b,d){var e=b==="width"?a.offsetWidth:a.offsetHeight;if(d==="border")return e;c.each(b==="width"?Pa:Qa,function(){d||(e-=parseFloat(c.css(a,"padding"+this))||0);if(d==="margin")e+=parseFloat(c.css(a,
+"margin"+this))||0;else e-=parseFloat(c.css(a,"border"+this+"Width"))||0});return e}function da(a,b,d,e){if(c.isArray(b)&&b.length)c.each(b,function(f,h){d||Ra.test(a)?e(a,h):da(a+"["+(typeof h==="object"||c.isArray(h)?f:"")+"]",h,d,e)});else if(!d&&b!=null&&typeof b==="object")c.isEmptyObject(b)?e(a,""):c.each(b,function(f,h){da(a+"["+f+"]",h,d,e)});else e(a,b)}function S(a,b){var d={};c.each(pa.concat.apply([],pa.slice(0,b)),function(){d[this]=a});return d}function qa(a){if(!ea[a]){var b=c("<"+
+a+">").appendTo("body"),d=b.css("display");b.remove();if(d==="none"||d==="")d="block";ea[a]=d}return ea[a]}function fa(a){return c.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var t=E.document,c=function(){function a(){if(!b.isReady){try{t.documentElement.doScroll("left")}catch(j){setTimeout(a,1);return}b.ready()}}var b=function(j,s){return new b.fn.init(j,s)},d=E.jQuery,e=E.$,f,h=/^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]+)$)/,l=/\S/,k=/^\s+/,o=/\s+$/,x=/\W/,r=/\d/,A=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,
+C=/^[\],:{}\s]*$/,J=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,w=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,I=/(?:^|:|,)(?:\s*\[)+/g,L=/(webkit)[ \/]([\w.]+)/,g=/(opera)(?:.*version)?[ \/]([\w.]+)/,i=/(msie) ([\w.]+)/,n=/(mozilla)(?:.*? rv:([\w.]+))?/,m=navigator.userAgent,p=false,q=[],u,y=Object.prototype.toString,F=Object.prototype.hasOwnProperty,M=Array.prototype.push,N=Array.prototype.slice,O=String.prototype.trim,D=Array.prototype.indexOf,R={};b.fn=b.prototype={init:function(j,
+s){var v,z,H;if(!j)return this;if(j.nodeType){this.context=this[0]=j;this.length=1;return this}if(j==="body"&&!s&&t.body){this.context=t;this[0]=t.body;this.selector="body";this.length=1;return this}if(typeof j==="string")if((v=h.exec(j))&&(v[1]||!s))if(v[1]){H=s?s.ownerDocument||s:t;if(z=A.exec(j))if(b.isPlainObject(s)){j=[t.createElement(z[1])];b.fn.attr.call(j,s,true)}else j=[H.createElement(z[1])];else{z=b.buildFragment([v[1]],[H]);j=(z.cacheable?z.fragment.cloneNode(true):z.fragment).childNodes}return b.merge(this,
+j)}else{if((z=t.getElementById(v[2]))&&z.parentNode){if(z.id!==v[2])return f.find(j);this.length=1;this[0]=z}this.context=t;this.selector=j;return this}else if(!s&&!x.test(j)){this.selector=j;this.context=t;j=t.getElementsByTagName(j);return b.merge(this,j)}else return!s||s.jquery?(s||f).find(j):b(s).find(j);else if(b.isFunction(j))return f.ready(j);if(j.selector!==B){this.selector=j.selector;this.context=j.context}return b.makeArray(j,this)},selector:"",jquery:"1.4.4",length:0,size:function(){return this.length},
+toArray:function(){return N.call(this,0)},get:function(j){return j==null?this.toArray():j<0?this.slice(j)[0]:this[j]},pushStack:function(j,s,v){var z=b();b.isArray(j)?M.apply(z,j):b.merge(z,j);z.prevObject=this;z.context=this.context;if(s==="find")z.selector=this.selector+(this.selector?" ":"")+v;else if(s)z.selector=this.selector+"."+s+"("+v+")";return z},each:function(j,s){return b.each(this,j,s)},ready:function(j){b.bindReady();if(b.isReady)j.call(t,b);else q&&q.push(j);return this},eq:function(j){return j===
+-1?this.slice(j):this.slice(j,+j+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(N.apply(this,arguments),"slice",N.call(arguments).join(","))},map:function(j){return this.pushStack(b.map(this,function(s,v){return j.call(s,v,s)}))},end:function(){return this.prevObject||b(null)},push:M,sort:[].sort,splice:[].splice};b.fn.init.prototype=b.fn;b.extend=b.fn.extend=function(){var j,s,v,z,H,G=arguments[0]||{},K=1,Q=arguments.length,ga=false;
+if(typeof G==="boolean"){ga=G;G=arguments[1]||{};K=2}if(typeof G!=="object"&&!b.isFunction(G))G={};if(Q===K){G=this;--K}for(;K<Q;K++)if((j=arguments[K])!=null)for(s in j){v=G[s];z=j[s];if(G!==z)if(ga&&z&&(b.isPlainObject(z)||(H=b.isArray(z)))){if(H){H=false;v=v&&b.isArray(v)?v:[]}else v=v&&b.isPlainObject(v)?v:{};G[s]=b.extend(ga,v,z)}else if(z!==B)G[s]=z}return G};b.extend({noConflict:function(j){E.$=e;if(j)E.jQuery=d;return b},isReady:false,readyWait:1,ready:function(j){j===true&&b.readyWait--;
+if(!b.readyWait||j!==true&&!b.isReady){if(!t.body)return setTimeout(b.ready,1);b.isReady=true;if(!(j!==true&&--b.readyWait>0))if(q){var s=0,v=q;for(q=null;j=v[s++];)j.call(t,b);b.fn.trigger&&b(t).trigger("ready").unbind("ready")}}},bindReady:function(){if(!p){p=true;if(t.readyState==="complete")return setTimeout(b.ready,1);if(t.addEventListener){t.addEventListener("DOMContentLoaded",u,false);E.addEventListener("load",b.ready,false)}else if(t.attachEvent){t.attachEvent("onreadystatechange",u);E.attachEvent("onload",
+b.ready);var j=false;try{j=E.frameElement==null}catch(s){}t.documentElement.doScroll&&j&&a()}}},isFunction:function(j){return b.type(j)==="function"},isArray:Array.isArray||function(j){return b.type(j)==="array"},isWindow:function(j){return j&&typeof j==="object"&&"setInterval"in j},isNaN:function(j){return j==null||!r.test(j)||isNaN(j)},type:function(j){return j==null?String(j):R[y.call(j)]||"object"},isPlainObject:function(j){if(!j||b.type(j)!=="object"||j.nodeType||b.isWindow(j))return false;if(j.constructor&&
+!F.call(j,"constructor")&&!F.call(j.constructor.prototype,"isPrototypeOf"))return false;for(var s in j);return s===B||F.call(j,s)},isEmptyObject:function(j){for(var s in j)return false;return true},error:function(j){throw j;},parseJSON:function(j){if(typeof j!=="string"||!j)return null;j=b.trim(j);if(C.test(j.replace(J,"@").replace(w,"]").replace(I,"")))return E.JSON&&E.JSON.parse?E.JSON.parse(j):(new Function("return "+j))();else b.error("Invalid JSON: "+j)},noop:function(){},globalEval:function(j){if(j&&
+l.test(j)){var s=t.getElementsByTagName("head")[0]||t.documentElement,v=t.createElement("script");v.type="text/javascript";if(b.support.scriptEval)v.appendChild(t.createTextNode(j));else v.text=j;s.insertBefore(v,s.firstChild);s.removeChild(v)}},nodeName:function(j,s){return j.nodeName&&j.nodeName.toUpperCase()===s.toUpperCase()},each:function(j,s,v){var z,H=0,G=j.length,K=G===B||b.isFunction(j);if(v)if(K)for(z in j){if(s.apply(j[z],v)===false)break}else for(;H<G;){if(s.apply(j[H++],v)===false)break}else if(K)for(z in j){if(s.call(j[z],
+z,j[z])===false)break}else for(v=j[0];H<G&&s.call(v,H,v)!==false;v=j[++H]);return j},trim:O?function(j){return j==null?"":O.call(j)}:function(j){return j==null?"":j.toString().replace(k,"").replace(o,"")},makeArray:function(j,s){var v=s||[];if(j!=null){var z=b.type(j);j.length==null||z==="string"||z==="function"||z==="regexp"||b.isWindow(j)?M.call(v,j):b.merge(v,j)}return v},inArray:function(j,s){if(s.indexOf)return s.indexOf(j);for(var v=0,z=s.length;v<z;v++)if(s[v]===j)return v;return-1},merge:function(j,
+s){var v=j.length,z=0;if(typeof s.length==="number")for(var H=s.length;z<H;z++)j[v++]=s[z];else for(;s[z]!==B;)j[v++]=s[z++];j.length=v;return j},grep:function(j,s,v){var z=[],H;v=!!v;for(var G=0,K=j.length;G<K;G++){H=!!s(j[G],G);v!==H&&z.push(j[G])}return z},map:function(j,s,v){for(var z=[],H,G=0,K=j.length;G<K;G++){H=s(j[G],G,v);if(H!=null)z[z.length]=H}return z.concat.apply([],z)},guid:1,proxy:function(j,s,v){if(arguments.length===2)if(typeof s==="string"){v=j;j=v[s];s=B}else if(s&&!b.isFunction(s)){v=
+s;s=B}if(!s&&j)s=function(){return j.apply(v||this,arguments)};if(j)s.guid=j.guid=j.guid||s.guid||b.guid++;return s},access:function(j,s,v,z,H,G){var K=j.length;if(typeof s==="object"){for(var Q in s)b.access(j,Q,s[Q],z,H,v);return j}if(v!==B){z=!G&&z&&b.isFunction(v);for(Q=0;Q<K;Q++)H(j[Q],s,z?v.call(j[Q],Q,H(j[Q],s)):v,G);return j}return K?H(j[0],s):B},now:function(){return(new Date).getTime()},uaMatch:function(j){j=j.toLowerCase();j=L.exec(j)||g.exec(j)||i.exec(j)||j.indexOf("compatible")<0&&n.exec(j)||
+[];return{browser:j[1]||"",version:j[2]||"0"}},browser:{}});b.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(j,s){R["[object "+s+"]"]=s.toLowerCase()});m=b.uaMatch(m);if(m.browser){b.browser[m.browser]=true;b.browser.version=m.version}if(b.browser.webkit)b.browser.safari=true;if(D)b.inArray=function(j,s){return D.call(s,j)};if(!/\s/.test("\u00a0")){k=/^[\s\xA0]+/;o=/[\s\xA0]+$/}f=b(t);if(t.addEventListener)u=function(){t.removeEventListener("DOMContentLoaded",u,
+false);b.ready()};else if(t.attachEvent)u=function(){if(t.readyState==="complete"){t.detachEvent("onreadystatechange",u);b.ready()}};return E.jQuery=E.$=b}();(function(){c.support={};var a=t.documentElement,b=t.createElement("script"),d=t.createElement("div"),e="script"+c.now();d.style.display="none";d.innerHTML="   <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";var f=d.getElementsByTagName("*"),h=d.getElementsByTagName("a")[0],l=t.createElement("select"),
+k=l.appendChild(t.createElement("option"));if(!(!f||!f.length||!h)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(h.getAttribute("style")),hrefNormalized:h.getAttribute("href")==="/a",opacity:/^0.55$/.test(h.style.opacity),cssFloat:!!h.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:k.selected,deleteExpando:true,optDisabled:false,checkClone:false,
+scriptEval:false,noCloneEvent:true,boxModel:null,inlineBlockNeedsLayout:false,shrinkWrapBlocks:false,reliableHiddenOffsets:true};l.disabled=true;c.support.optDisabled=!k.disabled;b.type="text/javascript";try{b.appendChild(t.createTextNode("window."+e+"=1;"))}catch(o){}a.insertBefore(b,a.firstChild);if(E[e]){c.support.scriptEval=true;delete E[e]}try{delete b.test}catch(x){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function r(){c.support.noCloneEvent=
+false;d.detachEvent("onclick",r)});d.cloneNode(true).fireEvent("onclick")}d=t.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=t.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var r=t.createElement("div");r.style.width=r.style.paddingLeft="1px";t.body.appendChild(r);c.boxModel=c.support.boxModel=r.offsetWidth===2;if("zoom"in r.style){r.style.display="inline";r.style.zoom=
+1;c.support.inlineBlockNeedsLayout=r.offsetWidth===2;r.style.display="";r.innerHTML="<div style='width:4px;'></div>";c.support.shrinkWrapBlocks=r.offsetWidth!==2}r.innerHTML="<table><tr><td style='padding:0;display:none'></td><td>t</td></tr></table>";var A=r.getElementsByTagName("td");c.support.reliableHiddenOffsets=A[0].offsetHeight===0;A[0].style.display="";A[1].style.display="none";c.support.reliableHiddenOffsets=c.support.reliableHiddenOffsets&&A[0].offsetHeight===0;r.innerHTML="";t.body.removeChild(r).style.display=
+"none"});a=function(r){var A=t.createElement("div");r="on"+r;var C=r in A;if(!C){A.setAttribute(r,"return;");C=typeof A[r]==="function"}return C};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=f=h=null}})();var ra={},Ja=/^(?:\{.*\}|\[.*\])$/;c.extend({cache:{},uuid:0,expando:"jQuery"+c.now(),noData:{embed:true,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:true},data:function(a,b,d){if(c.acceptData(a)){a=a==E?ra:a;var e=a.nodeType,f=e?a[c.expando]:null,h=
+c.cache;if(!(e&&!f&&typeof b==="string"&&d===B)){if(e)f||(a[c.expando]=f=++c.uuid);else h=a;if(typeof b==="object")if(e)h[f]=c.extend(h[f],b);else c.extend(h,b);else if(e&&!h[f])h[f]={};a=e?h[f]:h;if(d!==B)a[b]=d;return typeof b==="string"?a[b]:a}}},removeData:function(a,b){if(c.acceptData(a)){a=a==E?ra:a;var d=a.nodeType,e=d?a[c.expando]:a,f=c.cache,h=d?f[e]:e;if(b){if(h){delete h[b];d&&c.isEmptyObject(h)&&c.removeData(a)}}else if(d&&c.support.deleteExpando)delete a[c.expando];else if(a.removeAttribute)a.removeAttribute(c.expando);
+else if(d)delete f[e];else for(var l in a)delete a[l]}},acceptData:function(a){if(a.nodeName){var b=c.noData[a.nodeName.toLowerCase()];if(b)return!(b===true||a.getAttribute("classid")!==b)}return true}});c.fn.extend({data:function(a,b){var d=null;if(typeof a==="undefined"){if(this.length){var e=this[0].attributes,f;d=c.data(this[0]);for(var h=0,l=e.length;h<l;h++){f=e[h].name;if(f.indexOf("data-")===0){f=f.substr(5);ka(this[0],f,d[f])}}}return d}else if(typeof a==="object")return this.each(function(){c.data(this,
+a)});var k=a.split(".");k[1]=k[1]?"."+k[1]:"";if(b===B){d=this.triggerHandler("getData"+k[1]+"!",[k[0]]);if(d===B&&this.length){d=c.data(this[0],a);d=ka(this[0],a,d)}return d===B&&k[1]?this.data(k[0]):d}else return this.each(function(){var o=c(this),x=[k[0],b];o.triggerHandler("setData"+k[1]+"!",x);c.data(this,a,b);o.triggerHandler("changeData"+k[1]+"!",x)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var e=
+c.data(a,b);if(!d)return e||[];if(!e||c.isArray(d))e=c.data(a,b,c.makeArray(d));else e.push(d);return e}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),e=d.shift();if(e==="inprogress")e=d.shift();if(e){b==="fx"&&d.unshift("inprogress");e.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===B)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,
+a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var sa=/[\n\t]/g,ha=/\s+/,Sa=/\r/g,Ta=/^(?:href|src|style)$/,Ua=/^(?:button|input)$/i,Va=/^(?:button|input|object|select|textarea)$/i,Wa=/^a(?:rea)?$/i,ta=/^(?:radio|checkbox)$/i;c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",
+colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};c.fn.extend({attr:function(a,b){return c.access(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(x){var r=c(this);r.addClass(a.call(this,x,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ha),d=0,e=this.length;d<e;d++){var f=this[d];if(f.nodeType===
+1)if(f.className){for(var h=" "+f.className+" ",l=f.className,k=0,o=b.length;k<o;k++)if(h.indexOf(" "+b[k]+" ")<0)l+=" "+b[k];f.className=c.trim(l)}else f.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(o){var x=c(this);x.removeClass(a.call(this,o,x.attr("class")))});if(a&&typeof a==="string"||a===B)for(var b=(a||"").split(ha),d=0,e=this.length;d<e;d++){var f=this[d];if(f.nodeType===1&&f.className)if(a){for(var h=(" "+f.className+" ").replace(sa," "),
+l=0,k=b.length;l<k;l++)h=h.replace(" "+b[l]+" "," ");f.className=c.trim(h)}else f.className=""}return this},toggleClass:function(a,b){var d=typeof a,e=typeof b==="boolean";if(c.isFunction(a))return this.each(function(f){var h=c(this);h.toggleClass(a.call(this,f,h.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var f,h=0,l=c(this),k=b,o=a.split(ha);f=o[h++];){k=e?k:!l.hasClass(f);l[k?"addClass":"removeClass"](f)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,
+"__className__",this.className);this.className=this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(sa," ").indexOf(a)>-1)return true;return false},val:function(a){if(!arguments.length){var b=this[0];if(b){if(c.nodeName(b,"option")){var d=b.attributes.value;return!d||d.specified?b.value:b.text}if(c.nodeName(b,"select")){var e=b.selectedIndex;d=[];var f=b.options;b=b.type==="select-one";
+if(e<0)return null;var h=b?e:0;for(e=b?e+1:f.length;h<e;h++){var l=f[h];if(l.selected&&(c.support.optDisabled?!l.disabled:l.getAttribute("disabled")===null)&&(!l.parentNode.disabled||!c.nodeName(l.parentNode,"optgroup"))){a=c(l).val();if(b)return a;d.push(a)}}return d}if(ta.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Sa,"")}return B}var k=c.isFunction(a);return this.each(function(o){var x=c(this),r=a;if(this.nodeType===1){if(k)r=
+a.call(this,o,x.val());if(r==null)r="";else if(typeof r==="number")r+="";else if(c.isArray(r))r=c.map(r,function(C){return C==null?"":C+""});if(c.isArray(r)&&ta.test(this.type))this.checked=c.inArray(x.val(),r)>=0;else if(c.nodeName(this,"select")){var A=c.makeArray(r);c("option",this).each(function(){this.selected=c.inArray(c(this).val(),A)>=0});if(!A.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},
+attr:function(a,b,d,e){if(!a||a.nodeType===3||a.nodeType===8)return B;if(e&&b in c.attrFn)return c(a)[b](d);e=a.nodeType!==1||!c.isXMLDoc(a);var f=d!==B;b=e&&c.props[b]||b;var h=Ta.test(b);if((b in a||a[b]!==B)&&e&&!h){if(f){b==="type"&&Ua.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");if(d===null)a.nodeType===1&&a.removeAttribute(b);else a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&
+b.specified?b.value:Va.test(a.nodeName)||Wa.test(a.nodeName)&&a.href?0:B;return a[b]}if(!c.support.style&&e&&b==="style"){if(f)a.style.cssText=""+d;return a.style.cssText}f&&a.setAttribute(b,""+d);if(!a.attributes[b]&&a.hasAttribute&&!a.hasAttribute(b))return B;a=!c.support.hrefNormalized&&e&&h?a.getAttribute(b,2):a.getAttribute(b);return a===null?B:a}});var X=/\.(.*)$/,ia=/^(?:textarea|input|select)$/i,La=/\./g,Ma=/ /g,Xa=/[^\w\s.|`]/g,Ya=function(a){return a.replace(Xa,"\\$&")},ua={focusin:0,focusout:0};
+c.event={add:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(c.isWindow(a)&&a!==E&&!a.frameElement)a=E;if(d===false)d=U;else if(!d)return;var f,h;if(d.handler){f=d;d=f.handler}if(!d.guid)d.guid=c.guid++;if(h=c.data(a)){var l=a.nodeType?"events":"__events__",k=h[l],o=h.handle;if(typeof k==="function"){o=k.handle;k=k.events}else if(!k){a.nodeType||(h[l]=h=function(){});h.events=k={}}if(!o)h.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,
+arguments):B};o.elem=a;b=b.split(" ");for(var x=0,r;l=b[x++];){h=f?c.extend({},f):{handler:d,data:e};if(l.indexOf(".")>-1){r=l.split(".");l=r.shift();h.namespace=r.slice(0).sort().join(".")}else{r=[];h.namespace=""}h.type=l;if(!h.guid)h.guid=d.guid;var A=k[l],C=c.event.special[l]||{};if(!A){A=k[l]=[];if(!C.setup||C.setup.call(a,e,r,o)===false)if(a.addEventListener)a.addEventListener(l,o,false);else a.attachEvent&&a.attachEvent("on"+l,o)}if(C.add){C.add.call(a,h);if(!h.handler.guid)h.handler.guid=
+d.guid}A.push(h);c.event.global[l]=true}a=null}}},global:{},remove:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(d===false)d=U;var f,h,l=0,k,o,x,r,A,C,J=a.nodeType?"events":"__events__",w=c.data(a),I=w&&w[J];if(w&&I){if(typeof I==="function"){w=I;I=I.events}if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(f in I)c.event.remove(a,f+b)}else{for(b=b.split(" ");f=b[l++];){r=f;k=f.indexOf(".")<0;o=[];if(!k){o=f.split(".");f=o.shift();x=RegExp("(^|\\.)"+
+c.map(o.slice(0).sort(),Ya).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(A=I[f])if(d){r=c.event.special[f]||{};for(h=e||0;h<A.length;h++){C=A[h];if(d.guid===C.guid){if(k||x.test(C.namespace)){e==null&&A.splice(h--,1);r.remove&&r.remove.call(a,C)}if(e!=null)break}}if(A.length===0||e!=null&&A.length===1){if(!r.teardown||r.teardown.call(a,o)===false)c.removeEvent(a,f,w.handle);delete I[f]}}else for(h=0;h<A.length;h++){C=A[h];if(k||x.test(C.namespace)){c.event.remove(a,r,C.handler,h);A.splice(h--,1)}}}if(c.isEmptyObject(I)){if(b=
+w.handle)b.elem=null;delete w.events;delete w.handle;if(typeof w==="function")c.removeData(a,J);else c.isEmptyObject(w)&&c.removeData(a)}}}}},trigger:function(a,b,d,e){var f=a.type||a;if(!e){a=typeof a==="object"?a[c.expando]?a:c.extend(c.Event(f),a):c.Event(f);if(f.indexOf("!")>=0){a.type=f=f.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[f]&&c.each(c.cache,function(){this.events&&this.events[f]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===
+8)return B;a.result=B;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(e=d.nodeType?c.data(d,"handle"):(c.data(d,"__events__")||{}).handle)&&e.apply(d,b);e=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+f]&&d["on"+f].apply(d,b)===false){a.result=false;a.preventDefault()}}catch(h){}if(!a.isPropagationStopped()&&e)c.event.trigger(a,b,e,true);else if(!a.isDefaultPrevented()){var l;e=a.target;var k=f.replace(X,""),o=c.nodeName(e,"a")&&k===
+"click",x=c.event.special[k]||{};if((!x._default||x._default.call(d,a)===false)&&!o&&!(e&&e.nodeName&&c.noData[e.nodeName.toLowerCase()])){try{if(e[k]){if(l=e["on"+k])e["on"+k]=null;c.event.triggered=true;e[k]()}}catch(r){}if(l)e["on"+k]=l;c.event.triggered=false}}},handle:function(a){var b,d,e,f;d=[];var h=c.makeArray(arguments);a=h[0]=c.event.fix(a||E.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;if(!b){e=a.type.split(".");a.type=e.shift();d=e.slice(0).sort();e=RegExp("(^|\\.)"+
+d.join("\\.(?:.*\\.)?")+"(\\.|$)")}a.namespace=a.namespace||d.join(".");f=c.data(this,this.nodeType?"events":"__events__");if(typeof f==="function")f=f.events;d=(f||{})[a.type];if(f&&d){d=d.slice(0);f=0;for(var l=d.length;f<l;f++){var k=d[f];if(b||e.test(k.namespace)){a.handler=k.handler;a.data=k.data;a.handleObj=k;k=k.handler.apply(this,h);if(k!==B){a.result=k;if(k===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+fix:function(a){if(a[c.expando])return a;var b=a;a=c.Event(b);for(var d=this.props.length,e;d;){e=this.props[--d];a[e]=b[e]}if(!a.target)a.target=a.srcElement||t;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=t.documentElement;d=t.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
+d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(a.which==null&&(a.charCode!=null||a.keyCode!=null))a.which=a.charCode!=null?a.charCode:a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==B)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,Y(a.origType,a.selector),c.extend({},a,{handler:Ka,guid:a.handler.guid}))},remove:function(a){c.event.remove(this,
+Y(a.origType,a.selector),a)}},beforeunload:{setup:function(a,b,d){if(c.isWindow(this))this.onbeforeunload=d},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};c.removeEvent=t.removeEventListener?function(a,b,d){a.removeEventListener&&a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent&&a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=a;this.type=a.type}else this.type=a;this.timeStamp=
+c.now();this[c.expando]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=ca;var a=this.originalEvent;if(a)if(a.preventDefault)a.preventDefault();else a.returnValue=false},stopPropagation:function(){this.isPropagationStopped=ca;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=ca;this.stopPropagation()},isDefaultPrevented:U,isPropagationStopped:U,isImmediatePropagationStopped:U};
+var va=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},wa=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?wa:va,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?wa:va)}}});if(!c.support.submitBubbles)c.event.special.submit={setup:function(){if(this.nodeName.toLowerCase()!==
+"form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length){a.liveFired=B;return la("submit",this,arguments)}});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13){a.liveFired=B;return la("submit",this,arguments)}})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};if(!c.support.changeBubbles){var V,
+xa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(e){return e.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},Z=function(a,b){var d=a.target,e,f;if(!(!ia.test(d.nodeName)||d.readOnly)){e=c.data(d,"_change_data");f=xa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",f);if(!(e===B||f===e))if(e!=null||f){a.type="change";a.liveFired=
+B;return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:Z,beforedeactivate:Z,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return Z.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return Z.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,"_change_data",xa(a))}},setup:function(){if(this.type===
+"file")return false;for(var a in V)c.event.add(this,a+".specialChange",V[a]);return ia.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return ia.test(this.nodeName)}};V=c.event.special.change.filters;V.focus=V.beforeactivate}t.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(e){e=c.event.fix(e);e.type=b;return c.event.trigger(e,null,e.target)}c.event.special[b]={setup:function(){ua[b]++===0&&t.addEventListener(a,d,true)},teardown:function(){--ua[b]===
+0&&t.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,e,f){if(typeof d==="object"){for(var h in d)this[b](h,e,d[h],f);return this}if(c.isFunction(e)||e===false){f=e;e=B}var l=b==="one"?c.proxy(f,function(o){c(this).unbind(o,l);return f.apply(this,arguments)}):f;if(d==="unload"&&b!=="one")this.one(d,e,f);else{h=0;for(var k=this.length;h<k;h++)c.event.add(this[h],d,l,e)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&!a.preventDefault)for(var d in a)this.unbind(d,
+a[d]);else{d=0;for(var e=this.length;d<e;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,e){return this.live(b,d,e,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){var d=c.Event(a);d.preventDefault();d.stopPropagation();c.event.trigger(d,b,this[0]);return d.result}},toggle:function(a){for(var b=arguments,d=
+1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(e){var f=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,f+1);e.preventDefault();return b[f].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var ya={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,e,f,h){var l,k=0,o,x,r=h||this.selector;h=h?this:c(this.context);if(typeof d===
+"object"&&!d.preventDefault){for(l in d)h[b](l,e,d[l],r);return this}if(c.isFunction(e)){f=e;e=B}for(d=(d||"").split(" ");(l=d[k++])!=null;){o=X.exec(l);x="";if(o){x=o[0];l=l.replace(X,"")}if(l==="hover")d.push("mouseenter"+x,"mouseleave"+x);else{o=l;if(l==="focus"||l==="blur"){d.push(ya[l]+x);l+=x}else l=(ya[l]||l)+x;if(b==="live"){x=0;for(var A=h.length;x<A;x++)c.event.add(h[x],"live."+Y(l,r),{data:e,selector:r,handler:f,origType:l,origHandler:f,preType:o})}else h.unbind("live."+Y(l,r),f)}}return this}});
+c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),function(a,b){c.fn[b]=function(d,e){if(e==null){e=d;d=null}return arguments.length>0?this.bind(b,d,e):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});E.attachEvent&&!E.addEventListener&&c(E).bind("unload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});
+(function(){function a(g,i,n,m,p,q){p=0;for(var u=m.length;p<u;p++){var y=m[p];if(y){var F=false;for(y=y[g];y;){if(y.sizcache===n){F=m[y.sizset];break}if(y.nodeType===1&&!q){y.sizcache=n;y.sizset=p}if(y.nodeName.toLowerCase()===i){F=y;break}y=y[g]}m[p]=F}}}function b(g,i,n,m,p,q){p=0;for(var u=m.length;p<u;p++){var y=m[p];if(y){var F=false;for(y=y[g];y;){if(y.sizcache===n){F=m[y.sizset];break}if(y.nodeType===1){if(!q){y.sizcache=n;y.sizset=p}if(typeof i!=="string"){if(y===i){F=true;break}}else if(k.filter(i,
+[y]).length>0){F=y;break}}y=y[g]}m[p]=F}}}var d=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,e=0,f=Object.prototype.toString,h=false,l=true;[0,0].sort(function(){l=false;return 0});var k=function(g,i,n,m){n=n||[];var p=i=i||t;if(i.nodeType!==1&&i.nodeType!==9)return[];if(!g||typeof g!=="string")return n;var q,u,y,F,M,N=true,O=k.isXML(i),D=[],R=g;do{d.exec("");if(q=d.exec(R)){R=q[3];D.push(q[1]);if(q[2]){F=q[3];
+break}}}while(q);if(D.length>1&&x.exec(g))if(D.length===2&&o.relative[D[0]])u=L(D[0]+D[1],i);else for(u=o.relative[D[0]]?[i]:k(D.shift(),i);D.length;){g=D.shift();if(o.relative[g])g+=D.shift();u=L(g,u)}else{if(!m&&D.length>1&&i.nodeType===9&&!O&&o.match.ID.test(D[0])&&!o.match.ID.test(D[D.length-1])){q=k.find(D.shift(),i,O);i=q.expr?k.filter(q.expr,q.set)[0]:q.set[0]}if(i){q=m?{expr:D.pop(),set:C(m)}:k.find(D.pop(),D.length===1&&(D[0]==="~"||D[0]==="+")&&i.parentNode?i.parentNode:i,O);u=q.expr?k.filter(q.expr,
+q.set):q.set;if(D.length>0)y=C(u);else N=false;for(;D.length;){q=M=D.pop();if(o.relative[M])q=D.pop();else M="";if(q==null)q=i;o.relative[M](y,q,O)}}else y=[]}y||(y=u);y||k.error(M||g);if(f.call(y)==="[object Array]")if(N)if(i&&i.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&k.contains(i,y[g])))n.push(u[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&n.push(u[g]);else n.push.apply(n,y);else C(y,n);if(F){k(F,p,n,m);k.uniqueSort(n)}return n};k.uniqueSort=function(g){if(w){h=
+l;g.sort(w);if(h)for(var i=1;i<g.length;i++)g[i]===g[i-1]&&g.splice(i--,1)}return g};k.matches=function(g,i){return k(g,null,null,