changeset 5803:6446873d152f

upgraded bundled pygments from 1.3.1 to 1.4
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Thu, 01 Dec 2011 02:26:16 +0100
parents dba48cb280f9
children 2e7f76626b58
files MoinMoin/support/pygments/__init__.py MoinMoin/support/pygments/formatters/html.py MoinMoin/support/pygments/formatters/img.py MoinMoin/support/pygments/formatters/latex.py MoinMoin/support/pygments/lexer.py MoinMoin/support/pygments/lexers/_luabuiltins.py MoinMoin/support/pygments/lexers/_mapping.py MoinMoin/support/pygments/lexers/agile.py MoinMoin/support/pygments/lexers/compiled.py MoinMoin/support/pygments/lexers/dotnet.py MoinMoin/support/pygments/lexers/functional.py MoinMoin/support/pygments/lexers/hdl.py MoinMoin/support/pygments/lexers/math.py MoinMoin/support/pygments/lexers/other.py MoinMoin/support/pygments/lexers/templates.py MoinMoin/support/pygments/lexers/text.py MoinMoin/support/pygments/lexers/web.py MoinMoin/support/pygments/styles/__init__.py MoinMoin/support/pygments/token.py MoinMoin/support/pygments/util.py docs/REQUIREMENTS
diffstat 21 files changed, 2947 insertions(+), 419 deletions(-)
--- a/MoinMoin/support/pygments/__init__.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/__init__.py	Thu Dec 01 02:26:16 2011 +0100
@@ -20,13 +20,13 @@
     The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
 
     .. _Pygments tip:
-       http://dev.pocoo.org/hg/pygments-main/archive/tip.tar.gz#egg=Pygments-dev
+       http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
 
     :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-__version__ = '1.3.1'
+__version__ = '1.4'
 __docformat__ = 'restructuredtext'
 
 __all__ = ['lex', 'format', 'highlight']
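
A quick way to confirm the bump took effect (a minimal sketch; assumes the MoinMoin tree is importable, it is not part of this changeset):

    # Hypothetical sanity check: import the bundled copy and read its version.
    from MoinMoin.support import pygments

    print(pygments.__version__)   # expected: '1.4' after this changeset
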
--- a/MoinMoin/support/pygments/formatters/html.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/formatters/html.py	Thu Dec 01 02:26:16 2011 +0100
@@ -21,14 +21,17 @@
 __all__ = ['HtmlFormatter']
 
 
-def escape_html(text):
+_escape_html_table = {
+    ord('&'): u'&amp;',
+    ord('<'): u'&lt;',
+    ord('>'): u'&gt;',
+    ord('"'): u'&quot;',
+    ord("'"): u'&#39;',
+}
+
+def escape_html(text, table=_escape_html_table):
     """Escape &, <, > as well as single and double quotes for HTML."""
-    return text.replace('&', '&amp;').  \
-                replace('<', '&lt;').   \
-                replace('>', '&gt;').   \
-                replace('"', '&quot;'). \
-                replace("'", '&#39;')
-
+    return text.translate(table)
 
 def get_random_id():
     """Return a random id for javascript fields."""
@@ -371,22 +374,21 @@
             except ValueError:
                 pass
 
-        self._class_cache = {}
         self._create_stylesheet()
 
     def _get_css_class(self, ttype):
         """Return the css class of this token type prefixed with
         the classprefix option."""
-        if ttype in self._class_cache:
-            return self._class_cache[ttype]
-        return self.classprefix + _get_ttype_class(ttype)
+        ttypeclass = _get_ttype_class(ttype)
+        if ttypeclass:
+            return self.classprefix + ttypeclass
+        return ''
 
     def _create_stylesheet(self):
         t2c = self.ttype2class = {Token: ''}
         c2s = self.class2style = {}
-        cp = self.classprefix
         for ttype, ndef in self.style:
-            name = cp + _get_ttype_class(ttype)
+            name = self._get_css_class(ttype)
             style = ''
             if ndef['color']:
                 style += 'color: #%s; ' % ndef['color']
@@ -508,6 +510,7 @@
         st = self.linenostep
         la = self.lineanchors
         aln = self.anchorlinenos
+        nocls = self.noclasses
         if sp:
             lines = []
 
@@ -542,9 +545,16 @@
         # in case you wonder about the seemingly redundant <div> here: since the
         # content in the other cell also is wrapped in a div, some browsers in
         # some configurations seem to mess up the formatting...
-        yield 0, ('<table class="%stable">' % self.cssclass +
-                  '<tr><td class="linenos"><div class="linenodiv"><pre>' +
-                  ls + '</pre></div></td><td class="code">')
+        if nocls:
+            yield 0, ('<table class="%stable">' % self.cssclass +
+                      '<tr><td><div class="linenodiv" '
+                      'style="background-color: #f0f0f0; padding-right: 10px">'
+                      '<pre style="line-height: 125%">' +
+                      ls + '</pre></div></td><td class="code">')
+        else:
+            yield 0, ('<table class="%stable">' % self.cssclass +
+                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
+                      ls + '</pre></div></td><td class="code">')
         yield 0, dummyoutfile.getvalue()
         yield 0, '</td></tr></table>'
 
@@ -556,7 +566,23 @@
         num = self.linenostart
         mw = len(str(len(lines) + num - 1))
 
-        if sp:
+        if self.noclasses:
+            if sp:
+                for t, line in lines:
+                    if num%sp == 0:
+                        style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
+                    else:
+                        style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
+                    yield 1, '<span style="%s">%*s</span> ' % (
+                        style, mw, (num%st and ' ' or num)) + line
+                    num += 1
+            else:
+                for t, line in lines:
+                    yield 1, ('<span style="background-color: #f0f0f0; '
+                              'padding: 0 5px 0 5px">%*s</span> ' % (
+                              mw, (num%st and ' ' or num)) + line)
+                    num += 1
+        elif sp:
             for t, line in lines:
                 yield 1, '<span class="lineno%s">%*s</span> ' % (
                     num%sp == 0 and ' special' or '', mw,
@@ -616,6 +642,7 @@
         # for <span style=""> lookup only
         getcls = self.ttype2class.get
         c2s = self.class2style
+        escape_table = _escape_html_table
 
         lspan = ''
         line = ''
@@ -630,7 +657,7 @@
                 cls = self._get_css_class(ttype)
                 cspan = cls and '<span class="%s">' % cls or ''
 
-            parts = escape_html(value).split('\n')
+            parts = value.translate(escape_table).split('\n')
 
             # for all but the last line
             for part in parts[:-1]:
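
For context, the escaping rewrite above trades five chained .replace() calls for a single translate() pass over a code-point-keyed table. A standalone sketch (names here are illustrative, not from the patch):

    # Escape HTML special characters in one translate() pass over unicode text.
    _ESCAPE_TABLE = {
        ord('&'): u'&amp;',
        ord('<'): u'&lt;',
        ord('>'): u'&gt;',
        ord('"'): u'&quot;',
        ord("'"): u'&#39;',
    }

    def escape_html_demo(text):
        # text must be unicode: unicode.translate maps code points to strings.
        return text.translate(_ESCAPE_TABLE)

    print(escape_html_demo(u'<a href="x">&\'y\'</a>'))
    # -> &lt;a href=&quot;x&quot;&gt;&amp;&#39;y&#39;&lt;/a&gt;
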
--- a/MoinMoin/support/pygments/formatters/img.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/formatters/img.py	Thu Dec 01 02:26:16 2011 +0100
@@ -18,7 +18,7 @@
 
 # Import this carefully
 try:
-    import Image, ImageDraw, ImageFont
+    from PIL import Image, ImageDraw, ImageFont
     pil_available = True
 except ImportError:
     pil_available = False
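
The img.py change switches to the package-style PIL import. The surrounding guarded-import pattern, sketched on its own (the helper name is invented for the example):

    # Prefer the 'PIL' package namespace (Pillow and newer PIL releases) and
    # record availability instead of failing at import time.
    try:
        from PIL import Image, ImageDraw, ImageFont
        pil_available = True
    except ImportError:
        pil_available = False

    def require_pil():
        # Hypothetical helper: complain only when image output is requested.
        if not pil_available:
            raise RuntimeError('Python Imaging Library (PIL/Pillow) is not installed')
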
--- a/MoinMoin/support/pygments/formatters/latex.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/formatters/latex.py	Thu Dec 01 02:26:16 2011 +0100
@@ -21,13 +21,15 @@
     return text.replace('\\', '\x00'). \
                 replace('{', '\x01'). \
                 replace('}', '\x02'). \
-                replace('^', '\x03'). \
-                replace('_', '\x04'). \
                 replace('\x00', r'\%sZbs{}' % commandprefix). \
                 replace('\x01', r'\%sZob{}' % commandprefix). \
                 replace('\x02', r'\%sZcb{}' % commandprefix). \
-                replace('\x03', r'\%sZca{}' % commandprefix). \
-                replace('\x04', r'\%sZus{}' % commandprefix)
+                replace('^', r'\%sZca{}' % commandprefix). \
+                replace('_', r'\%sZus{}' % commandprefix). \
+                replace('#', r'\%sZsh{}' % commandprefix). \
+                replace('%', r'\%sZpc{}' % commandprefix). \
+                replace('$', r'\%sZdl{}' % commandprefix). \
+                replace('~', r'\%sZti{}' % commandprefix)
 
 
 DOC_TEMPLATE = r'''
@@ -81,6 +83,9 @@
 # * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
 #   for its class.
 # * \PY resets the style, parses the classnames and then calls \PY@do.
+#
+# Tip: to read this code, print it out in substituted form using e.g.
+# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
 
 STYLE_TEMPLATE = r'''
 \makeatletter
@@ -101,6 +106,10 @@
 \def\%(cp)sZob{\char`\{}
 \def\%(cp)sZcb{\char`\}}
 \def\%(cp)sZca{\char`\^}
+\def\%(cp)sZsh{\char`\#}
+\def\%(cp)sZpc{\char`\%%}
+\def\%(cp)sZdl{\char`\$}
+\def\%(cp)sZti{\char`\~}
 %% for compatibility with earlier versions
 \def\%(cp)sZat{@}
 \def\%(cp)sZlb{[}
@@ -131,12 +140,12 @@
 
     .. sourcecode:: latex
 
-        \begin{Verbatim}[commandchars=@\[\]]
-        @PY[k][def ]@PY[n+nf][foo](@PY[n][bar]):
-            @PY[k][pass]
+        \begin{Verbatim}[commandchars=\\{\}]
+        \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
+            \PY{k}{pass}
         \end{Verbatim}
 
-    The special command used here (``@PY``) and all the other macros it needs
+    The special command used here (``\PY``) and all the other macros it needs
     are output by the `get_style_defs` method.
 
     With the `full` option, a complete LaTeX document is output, including
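
The latex.py hunk extends escaping to #, %, $ and ~ (emitted as \PYZsh{}, \PYZpc{}, \PYZdl{} and \PYZti{}, with matching macro definitions added to STYLE_TEMPLATE). A hedged usage sketch, assuming the helper keeps its escape_tex(text, commandprefix) signature:

    # Show what the newly covered characters become with the 'PY' prefix.
    from pygments.formatters.latex import escape_tex

    print(escape_tex('100% of #1 costs $5 at ~/shop', 'PY'))
    # -> 100\PYZpc{} of \PYZsh{}1 costs \PYZdl{}5 at \PYZti{}/shop
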
--- a/MoinMoin/support/pygments/lexer.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexer.py	Thu Dec 01 02:26:16 2011 +0100
@@ -349,7 +349,53 @@
     self.tokens on the first instantiation.
     """
 
+    def _process_regex(cls, regex, rflags):
+        """Preprocess the regular expression component of a token definition."""
+        return re.compile(regex, rflags).match
+
+    def _process_token(cls, token):
+        """Preprocess the token component of a token definition."""
+        assert type(token) is _TokenType or callable(token), \
+               'token type must be simple type or callable, not %r' % (token,)
+        return token
+
+    def _process_new_state(cls, new_state, unprocessed, processed):
+        """Preprocess the state transition action of a token definition."""
+        if isinstance(new_state, str):
+            # an existing state
+            if new_state == '#pop':
+                return -1
+            elif new_state in unprocessed:
+                return (new_state,)
+            elif new_state == '#push':
+                return new_state
+            elif new_state[:5] == '#pop:':
+                return -int(new_state[5:])
+            else:
+                assert False, 'unknown new state %r' % new_state
+        elif isinstance(new_state, combined):
+            # combine a new state from existing ones
+            tmp_state = '_tmp_%d' % cls._tmpname
+            cls._tmpname += 1
+            itokens = []
+            for istate in new_state:
+                assert istate != new_state, 'circular state ref %r' % istate
+                itokens.extend(cls._process_state(unprocessed,
+                                                  processed, istate))
+            processed[tmp_state] = itokens
+            return (tmp_state,)
+        elif isinstance(new_state, tuple):
+            # push more than one state
+            for istate in new_state:
+                assert (istate in unprocessed or
+                        istate in ('#pop', '#push')), \
+                       'unknown new state ' + istate
+            return new_state
+        else:
+            assert False, 'unknown new state def %r' % new_state
+
     def _process_state(cls, unprocessed, processed, state):
+        """Preprocess a single state definition."""
         assert type(state) is str, "wrong state name %r" % state
         assert state[0] != '#', "invalid state name %r" % state
         if state in processed:
@@ -360,60 +406,31 @@
             if isinstance(tdef, include):
                 # it's a state reference
                 assert tdef != state, "circular state reference %r" % state
-                tokens.extend(cls._process_state(unprocessed, processed, str(tdef)))
+                tokens.extend(cls._process_state(unprocessed, processed,
+                                                 str(tdef)))
                 continue
 
             assert type(tdef) is tuple, "wrong rule def %r" % tdef
 
             try:
-                rex = re.compile(tdef[0], rflags).match
+                rex = cls._process_regex(tdef[0], rflags)
             except Exception, err:
                 raise ValueError("uncompilable regex %r in state %r of %r: %s" %
                                  (tdef[0], state, cls, err))
 
-            assert type(tdef[1]) is _TokenType or callable(tdef[1]), \
-                   'token type must be simple type or callable, not %r' % (tdef[1],)
+            token = cls._process_token(tdef[1])
 
             if len(tdef) == 2:
                 new_state = None
             else:
-                tdef2 = tdef[2]
-                if isinstance(tdef2, str):
-                    # an existing state
-                    if tdef2 == '#pop':
-                        new_state = -1
-                    elif tdef2 in unprocessed:
-                        new_state = (tdef2,)
-                    elif tdef2 == '#push':
-                        new_state = tdef2
-                    elif tdef2[:5] == '#pop:':
-                        new_state = -int(tdef2[5:])
-                    else:
-                        assert False, 'unknown new state %r' % tdef2
-                elif isinstance(tdef2, combined):
-                    # combine a new state from existing ones
-                    new_state = '_tmp_%d' % cls._tmpname
-                    cls._tmpname += 1
-                    itokens = []
-                    for istate in tdef2:
-                        assert istate != state, 'circular state ref %r' % istate
-                        itokens.extend(cls._process_state(unprocessed,
-                                                          processed, istate))
-                    processed[new_state] = itokens
-                    new_state = (new_state,)
-                elif isinstance(tdef2, tuple):
-                    # push more than one state
-                    for state in tdef2:
-                        assert (state in unprocessed or
-                                state in ('#pop', '#push')), \
-                               'unknown new state ' + state
-                    new_state = tdef2
-                else:
-                    assert False, 'unknown new state def %r' % tdef2
-            tokens.append((rex, tdef[1], new_state))
+                new_state = cls._process_new_state(tdef[2],
+                                                   unprocessed, processed)
+
+            tokens.append((rex, token, new_state))
         return tokens
 
     def process_tokendef(cls, name, tokendefs=None):
+        """Preprocess a dictionary of token definitions."""
         processed = cls._all_tokens[name] = {}
         tokendefs = tokendefs or cls.tokens[name]
         for state in tokendefs.keys():
@@ -421,6 +438,7 @@
         return processed
 
     def __call__(cls, *args, **kwds):
+        """Instantiate cls after preprocessing its token definitions."""
         if not hasattr(cls, '_tokens'):
             cls._all_tokens = {}
             cls._tmpname = 0
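
The lexer.py refactor only splits the metaclass preprocessing into _process_regex / _process_token / _process_new_state; behaviour is unchanged. A minimal, invented RegexLexer that exercises the state actions those methods normalize ('#pop', '#push', combined()):

    # Toy lexer (not part of the patch); its token defs use the transition
    # forms handled by _process_new_state above.
    from pygments.lexer import RegexLexer, combined
    from pygments.token import Text, String, Punctuation

    class ToyLexer(RegexLexer):
        name = 'Toy'
        tokens = {
            'root': [
                (r'\(', Punctuation, '#push'),             # push this state again
                (r'\)', Punctuation, '#pop'),              # pop one state
                (r'"', String, combined('escape', 'dq')),  # enter a combined state
                (r'.', Text),
            ],
            'escape': [(r'\\.', String.Escape)],
            'dq': [(r'"', String, '#pop'), (r'[^"\\]+', String)],
        }

    for tok, val in ToyLexer().get_tokens('("a\\"b")'):
        print('%s %r' % (tok, val))
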
--- a/MoinMoin/support/pygments/lexers/_luabuiltins.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/_luabuiltins.py	Thu Dec 01 02:26:16 2011 +0100
@@ -60,14 +60,7 @@
            'debug.setmetatable',
            'debug.setupvalue',
            'debug.traceback'],
- 'io': ['file:close',
-        'file:flush',
-        'file:lines',
-        'file:read',
-        'file:seek',
-        'file:setvbuf',
-        'file:write',
-        'io.close',
+ 'io': ['io.close',
         'io.flush',
         'io.input',
         'io.lines',
@@ -175,7 +168,7 @@
             return name.startswith('math')
 
         def is_in_io_module(name):
-            return name.startswith('io.') or name.startswith('file:')
+            return name.startswith('io.')
 
         def is_in_os_module(name):
             return name.startswith('os.')
--- a/MoinMoin/support/pygments/lexers/_mapping.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/_mapping.py	Thu Dec 01 02:26:16 2011 +0100
@@ -30,16 +30,18 @@
     'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
     'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
     'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
+    'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk',), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
     'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
     'BaseMakefileLexer': ('pygments.lexers.text', 'Makefile', ('basemake',), (), ()),
     'BashLexer': ('pygments.lexers.other', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass'), ('application/x-sh', 'application/x-shellscript')),
     'BashSessionLexer': ('pygments.lexers.other', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
     'BatchLexer': ('pygments.lexers.other', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
     'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+    'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
     'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
     'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
     'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h'), ('text/x-chdr', 'text/x-csrc')),
-    'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake',), ('text/x-cmake',)),
+    'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
     'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
     'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
     'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
@@ -49,7 +51,7 @@
     'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
     'ClojureLexer': ('pygments.lexers.agile', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
     'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript'), ('*.coffee',), ('text/coffeescript',)),
-    'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldufsion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
+    'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
     'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
     'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
     'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx'), ('text/x-c++hdr', 'text/x-c++src')),
@@ -68,13 +70,15 @@
     'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
     'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
     'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
-    'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan',), ('text/x-dylan',)),
+    'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
+    'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl'), ('text/x-dylan',)),
     'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
     'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl'), ('text/x-erlang',)),
     'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
     'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
     'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
     'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
+    'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
     'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
     'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90'), ('text/x-fortran',)),
     'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
@@ -85,6 +89,7 @@
     'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('Cucumber', 'cucumber', 'Gherkin', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
     'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
     'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
+    'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
     'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
     'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
     'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
@@ -94,14 +99,17 @@
     'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
     'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
     'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
-    'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg', '*.properties'), ('text/x-ini',)),
+    'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+    'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg'), ('text/x-ini',)),
     'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
+    'IokeLexer': ('pygments.lexers.agile', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
     'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
+    'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade', 'JADE'), ('*.jade',), ('text/x-jade',)),
     'JavaLexer': ('pygments.lexers.compiled', 'Java', ('java',), ('*.java',), ('text/x-java',)),
     'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
     'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
     'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
-    'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/x-javascript', 'text/x-javascript', 'text/javascript')),
+    'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
     'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
     'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
     'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
@@ -109,7 +117,7 @@
     'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
     'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
     'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
-    'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua',), ('text/x-lua', 'application/x-lua')),
+    'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
     'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode',), ('*.moo',), ('text/x-moocode',)),
     'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
     'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
@@ -117,6 +125,8 @@
     'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
     'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
     'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
+    'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+    'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
     'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab', 'octave'), ('*.m',), ('text/matlab',)),
     'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()),
     'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
@@ -143,8 +153,11 @@
     'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
     'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
     'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]'), ('text/x-php',)),
+    'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript',), ('*.ps', '*.eps'), ('application/postscript',)),
     'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
     'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+    'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties',), ('*.properties',), ('text/x-java-properties',)),
+    'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf',), ('*.proto',), ()),
     'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
     'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
     'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
@@ -165,17 +178,20 @@
     'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
     'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
     'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
-    'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx'), ('text/x-ruby', 'application/x-ruby')),
+    'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
     'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R'), ('text/S-plus', 'text/S', 'text/R')),
     'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)),
     'ScalaLexer': ('pygments.lexers.compiled', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
+    'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml', 'SCAML'), ('*.scaml',), ('text/x-scaml',)),
     'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm',), ('text/x-scheme', 'application/x-scheme')),
+    'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
     'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak'), ('*.st',), ('text/x-smalltalk',)),
     'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
     'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
     'SqlLexer': ('pygments.lexers.other', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
     'SqliteConsoleLexer': ('pygments.lexers.other', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
     'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
+    'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
     'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
     'TcshLexer': ('pygments.lexers.other', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
     'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
@@ -183,7 +199,12 @@
     'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
     'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
     'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
+    'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
+    'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
+    'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
+    'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('v',), ('*.v', '*.sv'), ('text/x-verilog',)),
     'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc'), ('text/x-vim',)),
+    'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy'), ('*.xqy', '*.xquery'), ('text/xquery', 'application/xquery')),
     'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
     'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
     'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml', 'application/xsl+xml', 'application/xslt+xml')),
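
With the regenerated _mapping.py above, the new lexers become reachable through the usual lookup helpers; a short usage sketch (aliases and filenames taken from the table):

    # Resolve some of the newly registered lexers via the standard API.
    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    print(get_lexer_by_name('factor'))               # FactorLexer
    print(get_lexer_by_name('ahk'))                  # AutohotkeyLexer
    print(get_lexer_for_filename('CMakeLists.txt'))  # CMakeLexer, via the new filename pattern
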
--- a/MoinMoin/support/pygments/lexers/agile.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/agile.py	Thu Dec 01 02:26:16 2011 +0100
@@ -22,7 +22,7 @@
 __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
            'RubyLexer', 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer',
            'MiniDLexer', 'IoLexer', 'TclLexer', 'ClojureLexer',
-           'Python3Lexer', 'Python3TracebackLexer']
+           'Python3Lexer', 'Python3TracebackLexer', 'FactorLexer', 'IokeLexer']
 
 # b/w compatibility
 from pygments.lexers.functional import SchemeLexer
@@ -43,8 +43,8 @@
     tokens = {
         'root': [
             (r'\n', Text),
-            (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
-            (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+            (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+            (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
             (r'[^\S\n]+', Text),
             (r'#.*$', Comment),
             (r'[]{}:(),;[]', Punctuation),
@@ -104,7 +104,7 @@
         'numbers': [
             (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
             (r'\d+[eE][+-]?[0-9]+', Number.Float),
-            (r'0\d+', Number.Oct),
+            (r'0[0-7]+', Number.Oct),
             (r'0[xX][a-fA-F0-9]+', Number.Hex),
             (r'\d+L', Number.Integer.Long),
             (r'\d+', Number.Integer)
@@ -310,17 +310,17 @@
         tb = 0
         for match in line_re.finditer(text):
             line = match.group()
-            if line.startswith('>>> ') or line.startswith('... '):
+            if line.startswith(u'>>> ') or line.startswith(u'... '):
                 tb = 0
                 insertions.append((len(curcode),
                                    [(0, Generic.Prompt, line[:4])]))
                 curcode += line[4:]
-            elif line.rstrip() == '...' and not tb:
+            elif line.rstrip() == u'...' and not tb:
                 # only a new >>> prompt can end an exception block
                 # otherwise an ellipsis in place of the traceback frames
                 # will be mishandled
                 insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, '...')]))
+                                   [(0, Generic.Prompt, u'...')]))
                 curcode += line[3:]
             else:
                 if curcode:
@@ -329,8 +329,8 @@
                         yield item
                     curcode = ''
                     insertions = []
-                if (line.startswith('Traceback (most recent call last):') or
-                    re.match(r'  File "[^"]+", line \d+\n$', line)):
+                if (line.startswith(u'Traceback (most recent call last):') or
+                    re.match(ur'  File "[^"]+", line \d+\n$', line)):
                     tb = 1
                     curtb = line
                     tbindex = match.start()
@@ -338,7 +338,7 @@
                     yield match.start(), Name.Class, line
                 elif tb:
                     curtb += line
-                    if not (line.startswith(' ') or line.strip() == '...'):
+                    if not (line.startswith(' ') or line.strip() == u'...'):
                         tb = 0
                         for i, t, v in tblexer.get_tokens_unprocessed(curtb):
                             yield tbindex+i, t, v
@@ -371,7 +371,7 @@
         ],
         'intb': [
             (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name.Identifier, Text)),
+             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
             (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
              bygroups(Text, Name.Builtin, Text, Number, Text)),
             (r'^(    )(.+)(\n)',
@@ -379,9 +379,9 @@
             (r'^([ \t]*)(...)(\n)',
              bygroups(Text, Comment, Text)), # for doctests...
             (r'^(.+)(: )(.+)(\n)',
-             bygroups(Name.Class, Text, Name.Identifier, Text), '#pop'),
+             bygroups(Generic.Error, Text, Name, Text), '#pop'),
             (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
-             bygroups(Name.Class, Text), '#pop')
+             bygroups(Generic.Error, Text), '#pop')
         ],
     }
 
@@ -409,15 +409,15 @@
         ],
         'intb': [
             (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name.Identifier, Text)),
+             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
             (r'^(    )(.+)(\n)',
              bygroups(Text, using(Python3Lexer), Text)),
             (r'^([ \t]*)(...)(\n)',
              bygroups(Text, Comment, Text)), # for doctests...
             (r'^(.+)(: )(.+)(\n)',
-             bygroups(Name.Class, Text, Name.Identifier, Text), '#pop'),
+             bygroups(Generic.Error, Text, Name, Text), '#pop'),
             (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
-             bygroups(Name.Class, Text), '#pop')
+             bygroups(Generic.Error, Text), '#pop')
         ],
     }
 
@@ -428,8 +428,9 @@
     """
 
     name = 'Ruby'
-    aliases = ['rb', 'ruby']
-    filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx']
+    aliases = ['rb', 'ruby', 'duby']
+    filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+                 '*.rbx', '*.duby']
     mimetypes = ['text/x-ruby', 'application/x-ruby']
 
     flags = re.DOTALL | re.MULTILINE
@@ -623,7 +624,7 @@
              r'putc|puts|raise|rand|readline|readlines|require|'
              r'scan|select|self|send|set_trace_func|singleton_methods|sleep|'
              r'split|sprintf|srand|sub|syscall|system|taint|'
-             r'test|throw|to_a|to_s|trace_var|trap|type|untaint|untrace_var|'
+             r'test|throw|to_a|to_s|trace_var|trap|untaint|untrace_var|'
              r'warn)\b', Name.Builtin),
             (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
             # normal heredocs
@@ -837,7 +838,7 @@
             (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
             (r'(case|continue|do|else|elsif|for|foreach|if|last|my|'
              r'next|our|redo|reset|then|unless|until|while|use|'
-             r'print|new|BEGIN|END|return)\b', Keyword),
+             r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword),
             (r'(format)(\s+)([a-zA-Z0-9_]+)(\s*)(=)(\s*\n)',
              bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
             (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
@@ -893,11 +894,14 @@
             (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
             (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
             (r'0b[01]+(_[01]+)*', Number.Bin),
-            (r'\d+', Number.Integer),
+            (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+             Number.Float),
+            (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+            (r'\d+(_\d+)*', Number.Integer),
             (r"'(\\\\|\\'|[^'])*'", String),
             (r'"(\\\\|\\"|[^"])*"', String),
             (r'`(\\\\|\\`|[^`])*`', String.Backtick),
-            (r'<([^\s>]+)>', String.Regexp),
+            (r'<([^\s>]+)>', String.Regex),
             (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
             (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
             (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
@@ -974,7 +978,7 @@
     }
 
     def analyse_text(text):
-        if shebang_matches(text, r'perl(\d\.\d\.\d)?'):
+        if shebang_matches(text, r'perl'):
             return True
         if 'my $' in text:
             return 0.9
@@ -1006,7 +1010,7 @@
 
     name = 'Lua'
     aliases = ['lua']
-    filenames = ['*.lua']
+    filenames = ['*.lua', '*.wlua']
     mimetypes = ['text/x-lua', 'application/x-lua']
 
     tokens = {
@@ -1026,10 +1030,11 @@
 
             (r'\n', Text),
             (r'[^\S\n]', Text),
-            (r'(?s)\[(=*)\[.*?\]\1\]', String.Multiline),
-            (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
+            # multiline strings
+            (r'(?s)\[(=*)\[.*?\]\1\]', String),
 
             (r'(==|~=|<=|>=|\.\.|\.\.\.|[=+\-*/%^<>#])', Operator),
+            (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
             (r'(and|or|not)\b', Operator.Word),
 
             ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
@@ -1042,14 +1047,13 @@
 
             (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
 
-            # multiline strings
-            (r'(?s)\[(=*)\[(.*?)\]\1\]', String),
             ("'", String.Single, combined('stringescape', 'sqs')),
             ('"', String.Double, combined('stringescape', 'dqs'))
         ],
 
         'funcname': [
-            ('[A-Za-z_][A-Za-z0-9_]*', Name.Function, '#pop'),
+            ('(?:([A-Za-z_][A-Za-z0-9_]*)(\.))?([A-Za-z_][A-Za-z0-9_]*)',
+             bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
             # inline function
             ('\(', Punctuation, '#pop'),
         ],
@@ -1483,3 +1487,473 @@
             (r'(\(|\))', Punctuation),
         ],
     }
+
+
+class FactorLexer(RegexLexer):
+    """
+    Lexer for the `Factor <http://factorcode.org>`_ language.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Factor'
+    aliases = ['factor']
+    filenames = ['*.factor']
+    mimetypes = ['text/x-factor']
+
+    flags = re.MULTILINE | re.UNICODE
+
+    builtin_kernel = (
+        r'(?:or|2bi|2tri|while|wrapper|nip|4dip|wrapper\\?|bi\\*|'
+        r'callstack>array|both\\?|hashcode|die|dupd|callstack|'
+        r'callstack\\?|3dup|tri@|pick|curry|build|\\?execute|3bi|'
+        r'prepose|>boolean|\\?if|clone|eq\\?|tri\\*|\\?|=|swapd|'
+        r'2over|2keep|3keep|clear|2dup|when|not|tuple\\?|dup|2bi\\*|'
+        r'2tri\\*|call|tri-curry|object|bi@|do|unless\\*|if\\*|loop|'
+        r'bi-curry\\*|drop|when\\*|assert=|retainstack|assert\\?|-rot|'
+        r'execute|2bi@|2tri@|boa|with|either\\?|3drop|bi|curry\\?|'
+        r'datastack|until|3dip|over|3curry|tri-curry\\*|tri-curry@|swap|'
+        r'and|2nip|throw|bi-curry|\\(clone\\)|hashcode\\*|compose|2dip|if|3tri|'
+        r'unless|compose\\?|tuple|keep|2curry|equal\\?|assert|tri|2drop|'
+        r'most|<wrapper>|boolean\\?|identity-hashcode|identity-tuple\\?|'
+        r'null|new|dip|bi-curry@|rot|xor|identity-tuple|boolean)\s'
+        )
+
+    builtin_assocs = (
+        r'(?:\\?at|assoc\\?|assoc-clone-like|assoc=|delete-at\\*|'
+        r'assoc-partition|extract-keys|new-assoc|value\\?|assoc-size|'
+        r'map>assoc|push-at|assoc-like|key\\?|assoc-intersect|'
+        r'assoc-refine|update|assoc-union|assoc-combine|at\\*|'
+        r'assoc-empty\\?|at\\+|set-at|assoc-all\\?|assoc-subset\\?|'
+        r'assoc-hashcode|change-at|assoc-each|assoc-diff|zip|values|'
+        r'value-at|rename-at|inc-at|enum\\?|at|cache|assoc>map|<enum>|'
+        r'assoc|assoc-map|enum|value-at\\*|assoc-map-as|>alist|'
+        r'assoc-filter-as|clear-assoc|assoc-stack|maybe-set-at|'
+        r'substitute|assoc-filter|2cache|delete-at|assoc-find|keys|'
+        r'assoc-any\\?|unzip)\s'
+        )
+
+    builtin_combinators = (
+        r'(?:case|execute-effect|no-cond|no-case\\?|3cleave>quot|2cleave|'
+        r'cond>quot|wrong-values\\?|no-cond\\?|cleave>quot|no-case|'
+        r'case>quot|3cleave|wrong-values|to-fixed-point|alist>quot|'
+        r'case-find|cond|cleave|call-effect|2cleave>quot|recursive-hashcode|'
+        r'linear-case-quot|spread|spread>quot)\s'
+        )
+
+    builtin_math = (
+        r'(?:number=|if-zero|next-power-of-2|each-integer|\\?1\\+|'
+        r'fp-special\\?|imaginary-part|unless-zero|float>bits|number\\?|'
+        r'fp-infinity\\?|bignum\\?|fp-snan\\?|denominator|fp-bitwise=|\\*|'
+        r'\\+|power-of-2\\?|-|u>=|/|>=|bitand|log2-expects-positive|<|'
+        r'log2|>|integer\\?|number|bits>double|2/|zero\\?|(find-integer)|'
+        r'bits>float|float\\?|shift|ratio\\?|even\\?|ratio|fp-sign|bitnot|'
+        r'>fixnum|complex\\?|/i|/f|byte-array>bignum|when-zero|sgn|>bignum|'
+        r'next-float|u<|u>|mod|recip|rational|find-last-integer|>float|'
+        r'(all-integers\\?)|2^|times|integer|fixnum\\?|neg|fixnum|sq|'
+        r'bignum|(each-integer)|bit\\?|fp-qnan\\?|find-integer|complex|'
+        r'<fp-nan>|real|double>bits|bitor|rem|fp-nan-payload|all-integers\\?|'
+        r'real-part|log2-expects-positive\\?|prev-float|align|unordered\\?|'
+        r'float|fp-nan\\?|abs|bitxor|u<=|odd\\?|<=|/mod|rational\\?|>integer|'
+        r'real\\?|numerator)\s'
+        )
+
+    builtin_sequences = (
+        r'(?:member-eq\\?|append|assert-sequence=|find-last-from|trim-head-slice|'
+        r'clone-like|3sequence|assert-sequence\\?|map-as|last-index-from|'
+        r'reversed|index-from|cut\\*|pad-tail|remove-eq!|concat-as|'
+        r'but-last|snip|trim-tail|nths|nth|2selector|sequence|slice\\?|'
+        r'<slice>|partition|remove-nth|tail-slice|empty\\?|tail\\*|'
+        r'if-empty|find-from|virtual-sequence\\?|member\\?|set-length|'
+        r'drop-prefix|unclip|unclip-last-slice|iota|map-sum|'
+        r'bounds-error\\?|sequence-hashcode-step|selector-for|'
+        r'accumulate-as|map|start|midpoint@|\\(accumulate\\)|rest-slice|'
+        r'prepend|fourth|sift|accumulate!|new-sequence|follow|map!|'
+        r'like|first4|1sequence|reverse|slice|unless-empty|padding|'
+        r'virtual@|repetition\\?|set-last|index|4sequence|max-length|'
+        r'set-second|immutable-sequence|first2|first3|replicate-as|'
+        r'reduce-index|unclip-slice|supremum|suffix!|insert-nth|'
+        r'trim-tail-slice|tail|3append|short|count|suffix|concat|'
+        r'flip|filter|sum|immutable\\?|reverse!|2sequence|map-integers|'
+        r'delete-all|start\\*|indices|snip-slice|check-slice|sequence\\?|'
+        r'head|map-find|filter!|append-as|reduce|sequence=|halves|'
+        r'collapse-slice|interleave|2map|filter-as|binary-reduce|'
+        r'slice-error\\?|product|bounds-check\\?|bounds-check|harvest|'
+        r'immutable|virtual-exemplar|find|produce|remove|pad-head|last|'
+        r'replicate|set-fourth|remove-eq|shorten|reversed\\?|'
+        r'map-find-last|3map-as|2unclip-slice|shorter\\?|3map|find-last|'
+        r'head-slice|pop\\*|2map-as|tail-slice\\*|but-last-slice|'
+        r'2map-reduce|iota\\?|collector-for|accumulate|each|selector|'
+        r'append!|new-resizable|cut-slice|each-index|head-slice\\*|'
+        r'2reverse-each|sequence-hashcode|pop|set-nth|\\?nth|'
+        r'<flat-slice>|second|join|when-empty|collector|'
+        r'immutable-sequence\\?|<reversed>|all\\?|3append-as|'
+        r'virtual-sequence|subseq\\?|remove-nth!|push-either|new-like|'
+        r'length|last-index|push-if|2all\\?|lengthen|assert-sequence|'
+        r'copy|map-reduce|move|third|first|3each|tail\\?|set-first|'
+        r'prefix|bounds-error|any\\?|<repetition>|trim-slice|exchange|'
+        r'surround|2reduce|cut|change-nth|min-length|set-third|produce-as|'
+        r'push-all|head\\?|delete-slice|rest|sum-lengths|2each|head\\*|'
+        r'infimum|remove!|glue|slice-error|subseq|trim|replace-slice|'
+        r'push|repetition|map-index|trim-head|unclip-last|mismatch)\s'
+        )
+
+    builtin_namespaces = (
+        r'(?:global|\\+@|change|set-namestack|change-global|init-namespaces|'
+        r'on|off|set-global|namespace|set|with-scope|bind|with-variable|'
+        r'inc|dec|counter|initialize|namestack|get|get-global|make-assoc)\s'
+        )
+
+    builtin_arrays = (
+        r'(?:<array>|2array|3array|pair|>array|1array|4array|pair\\?|'
+        r'array|resize-array|array\\?)\s'
+        )
+
+    builtin_io = (
+        r'(?:\\+character\\+|bad-seek-type\\?|readln|each-morsel|stream-seek|'
+        r'read|print|with-output-stream|contents|write1|stream-write1|'
+        r'stream-copy|stream-element-type|with-input-stream|'
+        r'stream-print|stream-read|stream-contents|stream-tell|'
+        r'tell-output|bl|seek-output|bad-seek-type|nl|stream-nl|write|'
+        r'flush|stream-lines|\\+byte\\+|stream-flush|read1|'
+        r'seek-absolute\\?|stream-read1|lines|stream-readln|'
+        r'stream-read-until|each-line|seek-end|with-output-stream\\*|'
+        r'seek-absolute|with-streams|seek-input|seek-relative\\?|'
+        r'input-stream|stream-write|read-partial|seek-end\\?|'
+        r'seek-relative|error-stream|read-until|with-input-stream\\*|'
+        r'with-streams\\*|tell-input|each-block|output-stream|'
+        r'stream-read-partial|each-stream-block|each-stream-line)\s'
+        )
+
+    builtin_strings = (
+        r'(?:resize-string|>string|<string>|1string|string|string\\?)\s'
+        )
+
+    builtin_vectors = (
+        r'(?:vector\\?|<vector>|\\?push|vector|>vector|1vector)\s'
+        )
+
+    builtin_continuations = (
+        r'(?:with-return|restarts|return-continuation|with-datastack|'
+        r'recover|rethrow-restarts|<restart>|ifcc|set-catchstack|'
+        r'>continuation<|cleanup|ignore-errors|restart\\?|'
+        r'compute-restarts|attempt-all-error|error-thread|continue|'
+        r'<continuation>|attempt-all-error\\?|condition\\?|'
+        r'<condition>|throw-restarts|error|catchstack|continue-with|'
+        r'thread-error-hook|continuation|rethrow|callcc1|'
+        r'error-continuation|callcc0|attempt-all|condition|'
+        r'continuation\\?|restart|return)\s'
+        )
+
+    tokens = {
+        'root': [
+            # TODO: (( inputs -- outputs ))
+            # TODO: << ... >>
+
+            # defining words
+            (r'(\s*)(:|::|MACRO:|MEMO:)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Function)),
+            (r'(\s*)(M:)(\s+)(\S+)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Class, Text, Name.Function)),
+            (r'(\s*)(GENERIC:)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Function)),
+            (r'(\s*)(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
+                bygroups(Text, Keyword, Text, Name.Function, Text, Name.Function)),
+            (r'(\()(\s+)', bygroups(Name.Function, Text), 'stackeffect'),
+            (r'\;\s', Keyword),
+
+            # imports and namespaces
+            (r'(USING:)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), 'import'),
+            (r'(USE:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(UNUSE:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(QUALIFIED:)(\s+)(\S+)',
+                bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(QUALIFIED-WITH:)(\s+)(\S+)',
+                bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+)(=>)',
+                bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Text)),
+            (r'(IN:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Namespace)),
+            (r'(?:ALIAS|DEFER|FORGET|POSTPONE):', Keyword.Namespace),
+
+            # tuples and classes
+            (r'(TUPLE:)(\s+)(\S+)(\s+<\s+)(\S+)',
+                bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
+            (r'(TUPLE:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class), 'slots'),
+            (r'(UNION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+            (r'(INTERSECTION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+            (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
+                bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+            (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+                bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+            (r'INSTANCE:', Keyword),
+            (r'SLOT:', Keyword),
+            (r'MIXIN:', Keyword),
+            (r'(?:SINGLETON|SINGLETONS):', Keyword),
+
+            # other syntax
+            (r'CONSTANT:', Keyword),
+            (r'(?:SYMBOL|SYMBOLS):', Keyword),
+            (r'ERROR:', Keyword),
+            (r'SYNTAX:', Keyword),
+            (r'(HELP:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
+            (r'(MAIN:)(\s+)(\S+)', bygroups(Keyword.Namespace, Text, Name.Function)),
+            (r'(?:ALIEN|TYPEDEF|FUNCTION|STRUCT):', Keyword),
+
+            # vocab.private
+            # TODO: words inside vocab.private should have red names?
+            (r'(?:<PRIVATE|PRIVATE>)', Keyword.Namespace),
+
+            # strings
+            (r'"""\s+(?:.|\n)*?\s+"""', String),
+            (r'"(?:\\\\|\\"|[^"])*"', String),
+            (r'CHAR:\s+(\\[\\abfnrstv]*|\S)\s', String.Char),
+
+            # comments
+            (r'\!\s+.*$', Comment),
+            (r'#\!\s+.*$', Comment),
+
+            # boolean constants
+            (r'(t|f)\s', Name.Constant),
+
+            # numbers
+            (r'-?\d+\.\d+\s', Number.Float),
+            (r'-?\d+\s', Number.Integer),
+            (r'HEX:\s+[a-fA-F\d]+\s', Number.Hex),
+            (r'BIN:\s+[01]+\s', Number.Integer),
+            (r'OCT:\s+[0-7]+\s', Number.Oct),
+
+            # operators
+            (r'[-+/*=<>^]\s', Operator),
+
+            # keywords
+            (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s', Keyword),
+
+            # builtins
+            (builtin_kernel, Name.Builtin),
+            (builtin_assocs, Name.Builtin),
+            (builtin_combinators, Name.Builtin),
+            (builtin_math, Name.Builtin),
+            (builtin_sequences, Name.Builtin),
+            (builtin_namespaces, Name.Builtin),
+            (builtin_arrays, Name.Builtin),
+            (builtin_io, Name.Builtin),
+            (builtin_strings, Name.Builtin),
+            (builtin_vectors, Name.Builtin),
+            (builtin_continuations, Name.Builtin),
+
+            # whitespaces - usually not relevant
+            (r'\s+', Text),
+
+            # everything else is text
+            (r'\S+', Text),
+        ],
+
+        'stackeffect': [
+            (r'\s*\(', Name.Function, 'stackeffect'),
+            (r'\)', Name.Function, '#pop'),
+            (r'\-\-', Name.Function),
+            (r'\s+', Text),
+            (r'\S+', Name.Variable),
+        ],
+
+        'slots': [
+            (r'\s+', Text),
+            (r';\s', Keyword, '#pop'),
+            (r'\S+', Name.Variable),
+        ],
+
+        'import': [
+            (r';', Keyword, '#pop'),
+            (r'\S+', Name.Namespace),
+            (r'\s+', Text),
+        ],
+    }
+
+
+class IokeLexer(RegexLexer):
+    """
+    For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
+    prototype based programming language) source.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Ioke'
+    filenames = ['*.ik']
+    aliases = ['ioke', 'ik']
+    mimetypes = ['text/x-iokesrc']
+    tokens = {
+        'interpolatableText': [
+            (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
+             r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
+            (r'#{', Punctuation, 'textInterpolationRoot')
+            ],
+
+        'text': [
+            (r'(?<!\\)"', String, '#pop'),
+            include('interpolatableText'),
+            (r'[^"]', String)
+            ],
+
+        'documentation': [
+            (r'(?<!\\)"', String.Doc, '#pop'),
+            include('interpolatableText'),
+            (r'[^"]', String.Doc)
+            ],
+
+        'textInterpolationRoot': [
+            (r'}', Punctuation, '#pop'),
+            include('root')
+            ],
+
+        'slashRegexp': [
+            (r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
+            include('interpolatableText'),
+            (r'\\/', String.Regex),
+            (r'[^/]', String.Regex)
+            ],
+
+        'squareRegexp': [
+            (r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
+            include('interpolatableText'),
+            (r'\\]', String.Regex),
+            (r'[^\]]', String.Regex)
+            ],
+
+        'squareText': [
+            (r'(?<!\\)]', String, '#pop'),
+            include('interpolatableText'),
+            (r'[^\]]', String)
+            ],
+
+        'root': [
+            (r'\n', Text),
+            (r'\s+', Text),
+
+            # Comments
+            (r';(.*?)\n', Comment),
+            (r'\A#!(.*?)\n', Comment),
+
+            #Regexps
+            (r'#/', String.Regex, 'slashRegexp'),
+            (r'#r\[', String.Regex, 'squareRegexp'),
+
+            #Symbols
+            (r':[a-zA-Z0-9_!:?]+', String.Symbol),
+            (r'[a-zA-Z0-9_!:?]+:(?![a-zA-Z0-9_!?])', String.Other),
+            (r':"(\\\\|\\"|[^"])*"', String.Symbol),
+
+            #Documentation
+            (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
+             r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
+             r'|(?<=dsyntax\())[\s\n\r]*"', String.Doc, 'documentation'),
+
+            #Text
+            (r'"', String, 'text'),
+            (r'#\[', String, 'squareText'),
+
+            #Mimic
+            (r'[a-zA-Z0-9_][a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
+
+            #Assignment
+            (r'[a-zA-Z_][a-zA-Z0-9_!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))', Name.Variable),
+
+            # keywords
+            (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
+             r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
+             r'with)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            # Origin
+            (r'(eval|mimic|print|println)(?![a-zA-Z0-9!:_?])', Keyword),
+
+            # Base
+            (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
+             r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+            # Ground
+            (r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
+
+            #DefaultBehaviour Literals
+            (r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            #DefaultBehaviour Case
+            (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
+             r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            #DefaultBehaviour Reflection
+            (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
+             r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
+             r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+            #DefaultBehaviour Aspects
+            (r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            # DefaultBehaviour
+            (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+            (r'(use|destructuring)', Keyword.Reserved),
+
+            #DefaultBehavior BaseBehavior
+            (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
+             r'documentation|identity|removeCell!|undefineCell)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword),
+
+            #DefaultBehavior Internal
+            (r'(internal:compositeRegexp|internal:concatenateText|'
+             r'internal:createDecimal|internal:createNumber|'
+             r'internal:createRegexp|internal:createText)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            #DefaultBehaviour Conditions
+            (r'(availableRestarts|bind|error\!|findRestart|handle|'
+             r'invokeRestart|rescue|restart|signal\!|warn\!)'
+             r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+
+            # constants
+            (r'(nil|false|true)(?![a-zA-Z0-9!:_?])', Name.Constant),
+
+            # names
+            (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
+             r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
+             r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
+             r'Conditions|Definitions|FlowControl|Internal|Literals|'
+             r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
+             r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
+             r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
+             r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
+             r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
+             r'System|Text|Tuple)(?![a-zA-Z0-9!:_?])', Name.Builtin),
+
+            # functions
+            (ur'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
+             ur'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
+             ur'(?![a-zA-Z0-9!:_?])', Name.Function),
+
+            # Numbers
+            (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
+            (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+            (r'-?\d+', Number.Integer),
+
+            (r'#\(', Punctuation),
+
+             # Operators
+            (ur'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
+             ur'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
+             ur'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
+             ur'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
+             ur'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
+             ur'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
+             ur'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
+            (r'(and|nand|or|xor|nor|return|import)(?![a-zA-Z0-9_!?])',
+             Operator),
+
+            # Punctuation
+            (r'(\`\`|\`|\'\'|\'|\.|\,|@|@@|\[|\]|\(|\)|{|})', Punctuation),
+
+            #kinds
+            (r'[A-Z][a-zA-Z0-9_!:?]*', Name.Class),
+
+            #default cellnames
+            (r'[a-z_][a-zA-Z0-9_!:?]*', Name)
+        ]
+    }
--- a/MoinMoin/support/pygments/lexers/compiled.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/compiled.py	Thu Dec 01 02:26:16 2011 +0100
@@ -26,7 +26,7 @@
            'ScalaLexer', 'DylanLexer', 'OcamlLexer', 'ObjectiveCLexer',
            'FortranLexer', 'GLShaderLexer', 'PrologLexer', 'CythonLexer',
            'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer',
-           'Modula2Lexer']
+           'Modula2Lexer', 'BlitzMaxLexer']
 
 
 class CLexer(RegexLexer):
@@ -43,8 +43,12 @@
 
     tokens = {
         'whitespace': [
-            (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
-            (r'^\s*#', Comment.Preproc, 'macro'),
+            # preprocessor directives: without whitespace
+            ('^#if\s+0', Comment.Preproc, 'if0'),
+            ('^#', Comment.Preproc, 'macro'),
+            # or with whitespace
+            ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
+            ('^' + _ws + '#', Comment.Preproc, 'macro'),
             (r'^(\s*)([a-zA-Z_][a-zA-Z0-9_]*:(?!:))', bygroups(Text, Name.Label)),
             (r'\n', Text),
             (r'\s+', Text),
@@ -55,11 +59,11 @@
         'statements': [
             (r'L?"', String, 'string'),
             (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
             (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-            (r'0[0-7]+[Ll]?', Number.Oct),
-            (r'\d+[Ll]?', Number.Integer),
+            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+            (r'0[0-7]+[LlUu]*', Number.Oct),
+            (r'\d+[LlUu]*', Number.Integer),
             (r'\*/', Error),
             (r'[~!%^&*+=|?:<>/-]', Operator),
             (r'[()\[\],.]', Punctuation),
@@ -168,10 +172,17 @@
     filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx']
     mimetypes = ['text/x-c++hdr', 'text/x-c++src']
 
+    #: optional Comment or Whitespace
+    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
     tokens = {
         'root': [
-            (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
-            (r'^\s*#', Comment.Preproc, 'macro'),
+            # preprocessor directives: without whitespace
+            ('^#if\s+0', Comment.Preproc, 'if0'),
+            ('^#', Comment.Preproc, 'macro'),
+            # or with whitespace
+            ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
+            ('^' + _ws + '#', Comment.Preproc, 'macro'),
             (r'\n', Text),
             (r'\s+', Text),
             (r'\\\n', Text), # line continuation
@@ -180,11 +191,11 @@
             (r'[{}]', Punctuation),
             (r'L?"', String, 'string'),
             (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
             (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-            (r'0[0-7]+[Ll]?', Number.Oct),
-            (r'\d+[Ll]?', Number.Integer),
+            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+            (r'0[0-7]+[LlUu]*', Number.Oct),
+            (r'\d+[LlUu]*', Number.Integer),
             (r'\*/', Error),
             (r'[~!%^&*+=|?:<>/-]', Operator),
             (r'[()\[\],.;]', Punctuation),
@@ -204,6 +215,8 @@
              r'uuidof|unaligned|super|single_inheritance|raise|noop|'
              r'multiple_inheritance|m128i|m128d|m128|m64|interface|'
              r'identifier|forceinline|event|assume)\b', Keyword.Reserved),
+            # Offload C++ extensions, http://offload.codeplay.com/
+            (r'(__offload|__blockingoffload|__outer)\b', Keyword.Pseudo),
             (r'(true|false)\b', Keyword.Constant),
             (r'NULL\b', Name.Builtin),
             ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
@@ -1038,7 +1051,7 @@
 
     name = 'Dylan'
     aliases = ['dylan']
-    filenames = ['*.dylan']
+    filenames = ['*.dylan', '*.dyl']
     mimetypes = ['text/x-dylan']
 
     flags = re.DOTALL
@@ -1051,10 +1064,10 @@
              r'|open|primary|sealed|si(deways|ngleton)|slot'
              r'|v(ariable|irtual))\b', Name.Builtin),
             (r'<\w+>', Keyword.Type),
-            (r'#?"(?:\\.|[^"])+?"', String.Double),
             (r'//.*?\n', Comment.Single),
             (r'/\*[\w\W]*?\*/', Comment.Multiline),
-            (r'\'.*?\'', String.Single),
+            (r'"', String, 'string'),
+            (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
             (r'=>|\b(a(bove|fterwards)|b(e(gin|low)|y)|c(ase|leanup|reate)'
              r'|define|else(|if)|end|f(inally|or|rom)|i[fn]|l(et|ocal)|otherwise'
              r'|rename|s(elect|ignal)|t(hen|o)|u(n(less|til)|se)|wh(en|ile))\b',
@@ -1071,6 +1084,13 @@
             (r'#[a-zA-Z0-9-]+', Keyword),
             (r'[a-zA-Z0-9-]+', Name.Variable),
         ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+            (r'[^\\"\n]+', String), # all other characters
+            (r'\\\n', String), # line continuation
+            (r'\\', String), # stray backslash
+        ],
     }
 
 
@@ -1090,8 +1110,12 @@
 
     tokens = {
         'whitespace': [
-            (r'^(\s*)(#if\s+0)', bygroups(Text, Comment.Preproc), 'if0'),
-            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'macro'),
+            # preprocessor directives: without whitespace
+            ('^#if\s+0', Comment.Preproc, 'if0'),
+            ('^#', Comment.Preproc, 'macro'),
+            # or with whitespace
+            ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
+            ('^' + _ws + '#', Comment.Preproc, 'macro'),
             (r'\n', Text),
             (r'\s+', Text),
             (r'\\\n', Text), # line continuation
@@ -1323,7 +1347,7 @@
         'root': [
             (r'^#.*', Comment.Preproc),
             (r'//.*', Comment.Single),
-            (r'/\*[\w\W]*\*/', Comment.Multiline),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
             (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
              Operator),
             (r'[?:]', Operator), # quick hack for ternary
@@ -1333,7 +1357,7 @@
             (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
             (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
             (r'0[xX][0-9a-fA-F]*', Number.Hex),
-            (r'0[0-7]*', Number.Octal),
+            (r'0[0-7]*', Number.Oct),
             (r'[1-9][0-9]*', Number.Integer),
             (r'\b(attribute|const|uniform|varying|centroid|break|continue|'
              r'do|for|while|if|else|in|out|inout|float|int|void|bool|true|'
@@ -1346,12 +1370,13 @@
              r'lowp|mediump|highp|precision|input|output|hvec[234]|'
              r'[df]vec[234]|sampler[23]DRect|sampler2DRectShadow|sizeof|'
              r'cast|namespace|using)\b', Keyword), #future use
-            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name.Variable),
+            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
             (r'\.', Punctuation),
             (r'\s+', Text),
         ],
     }
 
+
 class PrologLexer(RegexLexer):
     """
     Lexer for Prolog files.
@@ -1371,7 +1396,7 @@
             (r'[0-9]+', Number),
             (r'[\[\](){}|.,;!]', Punctuation),
             (r':-|-->', Punctuation),
-            (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\U[0-9a-fA-F]{8}|'
+            (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
              r'\\[0-7]+\\|\\[\w\W]|[^"])*"', String.Double),
             (r"'(?:''|[^'])*'", String.Atom), # quoted atom
             # Needs to not be followed by an atom.
@@ -1707,7 +1732,7 @@
             (r'[:(){}\[\];,]', Punctuation),
 
             (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'0c[0-9]+', Number.Octal),
+            (r'0c[0-9]+', Number.Oct),
             (r'0b[01]+', Number.Binary),
             (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
             (r'[0-9_]+', Number.Decimal),
@@ -2363,3 +2388,95 @@
                     token = Keyword.Pervasive
             # return result
             yield index, token, value
+
+
+class BlitzMaxLexer(RegexLexer):
+    """
+    For `BlitzMax <http://blitzbasic.com>`_ source code.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'BlitzMax'
+    aliases = ['blitzmax', 'bmax']
+    filenames = ['*.bmx']
+    mimetypes = ['text/x-bmx']
+
+    bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
+    bmax_sktypes = r'@{1,2}|[!#$%]'
+    bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
+    bmax_name = r'[a-z_][a-z0-9_]*'
+    bmax_var = r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)|([ \t]*)([:])([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)' % (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
+    bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
+
+    flags = re.MULTILINE | re.IGNORECASE
+    tokens = {
+        'root': [
+            # Text
+            (r'[ \t]+', Text),
+            (r'\.\.\n', Text), # Line continuation
+            # Comments
+            (r"'.*?\n", Comment.Single),
+            (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
+            # Data types
+            ('"', String.Double, 'string'),
+            # Numbers
+            (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+            (r'\.[0-9]*(?!\.)', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r'\$[0-9a-f]+', Number.Hex),
+            (r'\%[10]+', Number), # Binary
+            # Other
+            (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
+             (bmax_vopwords), Operator),
+            (r'[(),.:\[\]]', Punctuation),
+            (r'(?:#[\w \t]*)', Name.Label),
+            (r'(?:\?[\w \t]*)', Comment.Preproc),
+            # Identifiers
+            (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
+             bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
+            (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
+             (bmax_name, bmax_name),
+             bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
+            (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
+                                 Operator, Text, Punctuation, Text,
+                                 Keyword.Type, Name.Class, Text,
+                                 Keyword.Type, Text, Punctuation)),
+            (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
+                                Text, Punctuation, Text, Keyword.Type,
+                                Name.Class, Text, Keyword.Type)),
+            (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
+             bygroups(Keyword.Reserved, Text, Name.Class)),
+            # Keywords
+            (r'\b(Ptr)\b', Keyword.Type),
+            (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
+            (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
+            (r'\b(TNullMethodException|TNullFunctionException|'
+             r'TNullObjectException|TArrayBoundsException|'
+             r'TRuntimeException)\b', Name.Exception),
+            (r'\b(Strict|SuperStrict|Module|ModuleInfo|'
+             r'End|Return|Continue|Exit|Public|Private|'
+             r'Var|VarPtr|Chr|Len|Asc|SizeOf|Sgn|Abs|Min|Max|'
+             r'New|Release|Delete|'
+             r'Incbin|IncbinPtr|IncbinLen|'
+             r'Framework|Include|Import|Extern|EndExtern|'
+             r'Function|EndFunction|'
+             r'Type|EndType|Extends|'
+             r'Method|EndMethod|'
+             r'Abstract|Final|'
+             r'If|Then|Else|ElseIf|EndIf|'
+             r'For|To|Next|Step|EachIn|'
+             r'While|Wend|EndWhile|'
+             r'Repeat|Until|Forever|'
+             r'Select|Case|Default|EndSelect|'
+             r'Try|Catch|EndTry|Throw|Assert|'
+             r'Goto|DefData|ReadData|RestoreData)\b', Keyword.Reserved),
+            # Final resolve (for variable names and such)
+            (r'(%s)' % (bmax_name), Name.Variable),
+        ],
+        'string': [
+            (r'""', String.Double),
+            (r'"C?', String.Double, '#pop'),
+            (r'[^"]+', String.Double),
+        ],
+    }
--- a/MoinMoin/support/pygments/lexers/dotnet.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/dotnet.py	Thu Dec 01 02:26:16 2011 +0100
@@ -234,28 +234,30 @@
              r'(On|Off|Binary|Text)', Keyword.Declaration),
             (r'(?<!\.)(AddHandler|Alias|'
              r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
-             r'CDec|CDbl|CInt|CLng|CObj|Const|Continue|CSByte|CShort|'
+             r'CDec|CDbl|CInt|CLng|CObj|Continue|CSByte|CShort|'
              r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
-             r'Default|Delegate|Dim|DirectCast|Do|Each|Else|ElseIf|'
-             r'End|EndIf|Enum|Erase|Error|Event|Exit|False|Finally|For|'
-             r'Friend|Function|Get|Global|GoSub|GoTo|Handles|If|'
-             r'Implements|Imports|Inherits|Interface|'
-             r'Let|Lib|Loop|Me|Module|MustInherit|'
-             r'MustOverride|MyBase|MyClass|Namespace|Narrowing|New|Next|'
+             r'Default|Delegate|DirectCast|Do|Each|Else|ElseIf|'
+             r'EndIf|Erase|Error|Event|Exit|False|Finally|For|'
+             r'Friend|Get|Global|GoSub|GoTo|Handles|If|'
+             r'Implements|Inherits|Interface|'
+             r'Let|Lib|Loop|Me|MustInherit|'
+             r'MustOverride|MyBase|MyClass|Narrowing|New|Next|'
              r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
              r'Operator|Option|Optional|Overloads|Overridable|'
-             r'Overrides|ParamArray|Partial|Private|Property|Protected|'
+             r'Overrides|ParamArray|Partial|Private|Protected|'
              r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
              r'Return|Select|Set|Shadows|Shared|Single|'
-             r'Static|Step|Stop|Structure|Sub|SyncLock|Then|'
+             r'Static|Step|Stop|SyncLock|Then|'
              r'Throw|To|True|Try|TryCast|Wend|'
              r'Using|When|While|Widening|With|WithEvents|'
              r'WriteOnly)\b', Keyword),
+            (r'(?<!\.)End\b', Keyword, 'end'),
+            (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
             (r'(?<!\.)(Function|Sub|Property)(\s+)',
              bygroups(Keyword, Text), 'funcname'),
             (r'(?<!\.)(Class|Structure|Enum)(\s+)',
              bygroups(Keyword, Text), 'classname'),
-            (r'(?<!\.)(Namespace|Imports)(\s+)',
+            (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
              bygroups(Keyword, Text), 'namespace'),
             (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
              r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
@@ -279,15 +281,25 @@
             (r'"C?', String, '#pop'),
             (r'[^"]+', String),
         ],
+        'dim': [
+            (r'[a-z_][a-z0-9_]*', Name.Variable, '#pop'),
+            (r'', Text, '#pop'),  # any other syntax
+        ],
         'funcname': [
-            (r'[a-z_][a-z0-9_]*', Name.Function, '#pop')
+            (r'[a-z_][a-z0-9_]*', Name.Function, '#pop'),
         ],
         'classname': [
-            (r'[a-z_][a-z0-9_]*', Name.Class, '#pop')
+            (r'[a-z_][a-z0-9_]*', Name.Class, '#pop'),
         ],
         'namespace': [
-            (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop')
+            (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop'),
         ],
+        'end': [
+            (r'\s+', Text),
+            (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
+             Keyword, '#pop'),
+            (r'', Text, '#pop'),
+        ]
     }
 
 class GenericAspxLexer(RegexLexer):
--- a/MoinMoin/support/pygments/lexers/functional.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/functional.py	Thu Dec 01 02:26:16 2011 +0100
@@ -381,6 +381,7 @@
         'import': [
             # Import statements
             (r'\s+', Text),
+            (r'"', String, 'string'),
             # after "funclist" state
             (r'\)', Punctuation, '#pop'),
             (r'qualified\b', Keyword),
@@ -550,7 +551,7 @@
 
     tokens = {
         'escape-sequence': [
-            (r'\\[\"\'ntbr]', String.Escape),
+            (r'\\[\\\"\'ntbr]', String.Escape),
             (r'\\[0-9]{3}', String.Escape),
             (r'\\x[0-9a-fA-F]{2}', String.Escape),
         ],
@@ -601,7 +602,7 @@
             (r'\.', Punctuation),
             (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
             (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
-            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
+            (r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
         ],
     }
 
@@ -688,7 +689,7 @@
             (r'[+-]?'+base_re+r'#[0-9a-zA-Z]+', Number.Integer),
             (r'[+-]?\d+', Number.Integer),
             (r'[+-]?\d+.\d+', Number.Float),
-            (r'[][:_@\".{}()|;,]', Punctuation),
+            (r'[]\[:_@\".{}()|;,]', Punctuation),
             (variable_re, Name.Variable),
             (atom_re, Name),
             (r'\?'+macro_re, Name.Constant),
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/support/pygments/lexers/hdl.py	Thu Dec 01 02:26:16 2011 +0100
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.hdl
+    ~~~~~~~~~~~~~~~~~~~
+
+    Lexers for hardware description languages.
+
+    :copyright: Copyright 2010 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import \
+     Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
+     Error
+
+__all__ = ['VerilogLexer']
+
+
+class VerilogLexer(RegexLexer):
+    """
+    For verilog source code with preprocessor directives.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'verilog'
+    aliases = ['v']
+    filenames = ['*.v', '*.sv']
+    mimetypes = ['text/x-verilog']
+
+    #: optional Comment or Whitespace
+    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+    tokens = {
+        'root': [
+            (r'^\s*`define', Comment.Preproc, 'macro'),
+            (r'\n', Text),
+            (r'\s+', Text),
+            (r'\\\n', Text), # line continuation
+            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+            (r'[{}#@]', Punctuation),
+            (r'L?"', String, 'string'),
+            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
+            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),   # should be binary
+            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
+            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
+            (r'\'[01xz]', Number),
+            (r'\d+[Ll]?', Number.Integer),
+            (r'\*/', Error),
+            (r'[~!%^&*+=|?:<>/-]', Operator),
+            (r'[()\[\],.;\']', Punctuation),
+            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
+
+            (r'^\s*(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+            (r'^\s*(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+
+            (r'(always|always_comb|always_ff|always_latch|and|assign|automatic|'
+             r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|'
+             r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|'
+             r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|'
+             r'endtable|endtask|enum|event|final|for|force|forever|fork|function|'
+             r'generate|genvar|highz0|highz1|if|initial|inout|input|'
+             r'integer|join|large|localparam|macromodule|medium|module|'
+             r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|'
+             r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|'
+             r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|'
+             r'rtranif1|scalared|signed|small|specify|specparam|strength|'
+             r'string|strong0|strong1|struct|table|task|'
+             r'tran|tranif0|tranif1|type|typedef|'
+             r'unsigned|var|vectored|void|wait|weak0|weak1|while|'
+             r'xnor|xor)\b', Keyword),
+
+            (r'(`accelerate|`autoexpand_vectornets|`celldefine|`default_nettype|'
+             r'`else|`elsif|`endcelldefine|`endif|`endprotect|`endprotected|'
+             r'`expand_vectornets|`ifdef|`ifndef|`include|`noaccelerate|`noexpand_vectornets|'
+             r'`noremove_gatenames|`noremove_netnames|`nounconnected_drive|'
+             r'`protect|`protected|`remove_gatenames|`remove_netnames|`resetall|'
+             r'`timescale|`unconnected_drive|`undef)\b', Comment.Preproc),
+
+            (r'(\$bits|\$bitstoreal|\$bitstoshortreal|\$countdrivers|\$display|\$fclose|'
+             r'\$fdisplay|\$finish|\$floor|\$fmonitor|\$fopen|\$fstrobe|\$fwrite|'
+             r'\$getpattern|\$history|\$incsave|\$input|\$itor|\$key|\$list|\$log|'
+             r'\$monitor|\$monitoroff|\$monitoron|\$nokey|\$nolog|\$printtimescale|'
+             r'\$random|\$readmemb|\$readmemh|\$realtime|\$realtobits|\$reset|\$reset_count|'
+             r'\$reset_value|\$restart|\$rtoi|\$save|\$scale|\$scope|\$shortrealtobits|'
+             r'\$showscopes|\$showvariables|\$showvars|\$sreadmemb|\$sreadmemh|'
+             r'\$stime|\$stop|\$strobe|\$time|\$timeformat|\$write)\b', Name.Builtin),
+
+            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+            (r'(byte|shortint|int|longint|integer|time|'
+             r'bit|logic|reg|'
+             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor|'
+             r'shortreal|real|realtime)\b', Keyword.Type),
+            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
+            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+        ],
+        'classname': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
+        ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+            (r'[^\\"\n]+', String), # all other characters
+            (r'\\\n', String), # line continuation
+            (r'\\', String), # stray backslash
+        ],
+        'macro': [
+            (r'[^/\n]+', Comment.Preproc),
+            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+            (r'//.*?\n', Comment.Single, '#pop'),
+            (r'/', Comment.Preproc),
+            (r'(?<=\\)\n', Comment.Preproc),
+            (r'\n', Comment.Preproc, '#pop'),
+        ],
+        'import': [
+            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
+        ]
+    }
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in \
+            RegexLexer.get_tokens_unprocessed(self, text):
+            # Convention: mark all upper case names as constants
+            if token is Name:
+                if value.isupper():
+                    token = Name.Constant
+            yield index, token, value
+
+
--- a/MoinMoin/support/pygments/lexers/math.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/math.py	Thu Dec 01 02:26:16 2011 +0100
@@ -153,10 +153,10 @@
             (r'%.*$', Comment),
             (r'^\s*function', Keyword, 'deffunc'),
 
-            # from 'iskeyword' on version 7.4.0.336 (R2007a):
-            (r'(break|case|catch|classdef|continue|else|elseif|end|for|function|'
-             r'global|if|otherwise|parfor|persistent|return|switch|try|while)\b',
-             Keyword),
+            # from 'iskeyword' on version 7.11 (R2010):
+            (r'(break|case|catch|classdef|continue|else|elseif|end|enumerated|'
+             r'events|for|function|global|if|methods|otherwise|parfor|'
+             r'persistent|properties|return|spmd|switch|try|while)\b', Keyword),
 
             ("(" + "|".join(elfun+specfun+elmat) + r')\b',  Name.Builtin),
 
@@ -228,7 +228,7 @@
                 # without is showing error on same line as before...?
                 line = "\n" + line
                 token = (0, Generic.Traceback, line)
-                insertions.append(  (idx, [token,]) )
+                insertions.append((idx, [token]))
 
             else:
                 if curcode:
--- a/MoinMoin/support/pygments/lexers/other.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/other.py	Thu Dec 01 02:26:16 2011 +0100
@@ -13,7 +13,7 @@
 
 from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
      this, do_insertions
-from pygments.token import Error, Punctuation, \
+from pygments.token import Error, Punctuation, Literal, Token, \
      Text, Comment, Operator, Keyword, Name, String, Number, Generic
 from pygments.util import shebang_matches
 from pygments.lexers.web import HtmlLexer
@@ -24,7 +24,9 @@
            'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
            'GnuplotLexer', 'PovrayLexer', 'AppleScriptLexer',
            'BashSessionLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
-           'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer']
+           'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer',
+           'PostScriptLexer', 'AutohotkeyLexer', 'GoodDataCLLexer',
+           'MaqlLexer', 'ProtoBufLexer', 'HybrisLexer']
 
 line_re  = re.compile('.*?\n')
 
@@ -355,7 +357,7 @@
             (r'\\[\w\W]', String.Escape),
             (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
             (r'[\[\]{}()=]', Operator),
-            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
             (r'&&|\|\|', Operator),
         ],
         'data': [
@@ -2095,7 +2097,7 @@
 
 class GherkinLexer(RegexLexer):
     """
-    For `Gherkin <http://cukes.info/>` syntax.
+    For `Gherkin <http://github.com/aslakhellesoy/gherkin/>` syntax.
 
     *New in Pygments 1.2.*
     """
@@ -2104,82 +2106,99 @@
     filenames = ['*.feature']
     mimetypes = ['text/x-gherkin']
 
-    feature_keywords_regexp  = ur'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функционалност|Функционал|Особина|Могућност|Özellik|Właściwość|Tính năng|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
-    scenario_keywords_regexp = ur'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарио|Сценарий структураси|Сценарий|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Основа|Концепт|Контекст|Założenia|Tình huống|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
-    examples_regexp          = ur'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
-    step_keywords_regexp     = ur'^(\s*)(하지만|조건|만일|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Унда |То |Онда |Но |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Агар |А |Și |És |anrhegedig a |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Yna |Ya know how |Ya gotta |Y |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Donat |Donada |Diyelim ki |Dengan |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |But y\'all |But |Biết |Bet |BUT |Atunci |And y\'all |And |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
+    feature_keywords         = ur'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
+    feature_element_keywords = ur'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
+    examples_keywords        = ur'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
+    step_keywords            = ur'^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
 
     tokens = {
         'comments': [
-            (r'#.*$', Comment)
+            (r'#.*$', Comment),
           ],
-        'multiline_descriptions' : [
-            (step_keywords_regexp, Keyword, "#pop"),
-            include('comments'),
-            (r"(\s|.)", Name.Constant)
-          ],
-        'multiline_descriptions_on_stack' : [
-            (step_keywords_regexp, Keyword, "#pop:2"),
+        'feature_elements' : [
+            (step_keywords, Keyword, "step_content_stack"),
             include('comments'),
-            (r"(\s|.)", Name.Constant)
+            (r"(\s|.)", Name.Function),
           ],
-        'scenario_table_description': [
-            (r"\s+\|", Text, 'scenario_table_header'),
+        'feature_elements_on_stack' : [
+            (step_keywords, Keyword, "#pop:2"),
             include('comments'),
-            (r"(\s|.)", Name.Constant)
+            (r"(\s|.)", Name.Function),
           ],
-        'scenario_table_header': [
-            (r"\s+\|\s*$", Text, "#pop:2"),
-            (r"(\s+\|\s*)(#.*)$", bygroups(Text, Comment), "#pop:2"),
+        'examples_table': [
+            (r"\s+\|", Keyword, 'examples_table_header'),
             include('comments'),
-            (r"\s+\|", Text),
-            (r"[^\|]", Name.Variable)
+            (r"(\s|.)", Name.Function),
+          ],
+        'examples_table_header': [
+            (r"\s+\|\s*$", Keyword, "#pop:2"),
+            include('comments'),
+            (r"\s*\|", Keyword),
+            (r"[^\|]", Name.Variable),
           ],
         'scenario_sections_on_stack': [
-            (scenario_keywords_regexp,
-             bygroups(Text, Name.Class, Name.Class, Name.Constant),
-             "multiline_descriptions_on_stack")
-            ],
+            (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements_on_stack"),
+          ],
         'narrative': [
             include('scenario_sections_on_stack'),
-            (r"(\s|.)", Name.Builtin)
+            (r"(\s|.)", Name.Function),
           ],
         'table_vars': [
-            (r'(<[^>]*>)', bygroups(Name.Variable))
+            (r'(<[^>]+>)', Name.Variable),
+          ],
+        'numbers': [
+            (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
           ],
         'string': [
             include('table_vars'),
             (r'(\s|.)', String),
           ],
         'py_string': [
-            (r'"""', String, "#pop"),
+            (r'"""', Keyword, "#pop"),
+            include('string'),
+          ],
+          'step_content_root':[
+            (r"$", Keyword, "#pop"),
+            include('step_content'),
+          ],
+          'step_content_stack':[
+            (r"$", Keyword, "#pop:2"),
+            include('step_content'),
+          ],
+          'step_content':[
+            (r'"', Name.Function, "double_string"),
+            include('table_vars'),
+            include('numbers'),
+            include('comments'),
+            (r'(\s|.)', Name.Function),
+          ],
+          'table_content': [
+            (r"\s+\|\s*$", Keyword, "#pop"),
+            include('comments'),
+            (r"\s*\|", Keyword),
             include('string'),
           ],
         'double_string': [
-            (r'"', String, "#pop"),
+            (r'"', Name.Function, "#pop"),
             include('string'),
           ],
         'root': [
-            (r'\n', Text),
+            (r'\n', Name.Function),
             include('comments'),
-            (r'"""', String, "py_string"),
-            (r'"', String, "double_string"),
+            (r'"""', Keyword, "py_string"),
+            (r'\s+\|', Keyword, 'table_content'),
+            (r'"', Name.Function, "double_string"),
             include('table_vars'),
-            (r'@[^@\s]+', Name.Namespace),
-            (step_keywords_regexp, bygroups(Text, Keyword)),
-            (feature_keywords_regexp,
-             bygroups(Name.Class, Name.Class, Name.Constant), 'narrative'),
-            (scenario_keywords_regexp,
-             bygroups(Text, Name.Class, Name.Class, Name.Constant),
-             "multiline_descriptions"),
-            (examples_regexp,
-             bygroups(Text, Name.Class, Name.Class, Name.Constant),
-             "scenario_table_description"),
-            (r'(\s|.)', Text)
+            include('numbers'),
+            (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
+            (step_keywords, bygroups(Name.Function, Keyword), "step_content_root"),
+            (feature_keywords, bygroups(Keyword, Keyword, Name.Function), 'narrative'),
+            (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements"),
+            (examples_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "examples_table"),
+            (r'(\s|.)', Name.Function),
         ]
     }
 
-
 class AsymptoteLexer(RegexLexer):
     """
     For `Asymptote <http://asymptote.sf.net/>`_ source code.
@@ -2295,3 +2314,530 @@
            elif token is Name and value in ASYVARNAME:
                token = Name.Variable
            yield index, token, value
+
+
+class PostScriptLexer(RegexLexer):
+    """
+    Lexer for PostScript files.
+
+    The PostScript Language Reference published by Adobe at
+    <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
+    is the authority for this.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'PostScript'
+    aliases = ['postscript']
+    filenames = ['*.ps', '*.eps']
+    mimetypes = ['application/postscript']
+
+    delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
+    delimiter_end = r'(?=[%s])' % delimiter
+
+    valid_name_chars = r'[^%s]' % delimiter
+    valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+
+    tokens = {
+        'root': [
+            # All comment types
+            (r'^%!.+\n', Comment.Preproc),
+            (r'%%.*\n', Comment.Special),
+            (r'(^%.*\n){2,}', Comment.Multiline),
+            (r'%.*\n', Comment.Single),
+
+            # String literals are awkward; enter separate state.
+            (r'\(', String, 'stringliteral'),
+
+            (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation),
+
+            # Numbers
+            (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
+            # Slight abuse: use Oct to signify any explicit base system
+            (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
+             r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
+            (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
+             + delimiter_end, Number.Float),
+            (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
+
+            # References
+            (r'\/%s' % valid_name, Name.Variable),
+
+            # Names
+            (valid_name, Name.Function),      # Anything else is executed
+
+            # These keywords taken from
+            # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
+            # Is there an authoritative list anywhere that doesn't involve
+            # trawling documentation?
+
+            (r'(false|true)' + delimiter_end, Keyword.Constant),
+
+            # Conditionals / flow control
+            (r'(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)'
+             + delimiter_end, Keyword.Reserved),
+
+            ('(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|'
+             'clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|'
+             'currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|'
+             'dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|'
+             'fill|findfont|floor|get|getinterval|grestore|gsave|gt|'
+             'identmatrix|idiv|idtransform|index|invertmatrix|itransform|'
+             'length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|'
+             'pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|'
+             'rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|'
+             'save|scale|scalefont|setdash|setfont|setgray|setlinecap|'
+             'setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|'
+             'showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|'
+             'syntaxerror|transform|translate|truncate|typecheck|undefined|'
+             'undefinedfilename|undefinedresult)' + delimiter_end,
+             Name.Builtin),
+
+            (r'\s+', Text),
+        ],
+
+        'stringliteral': [
+            (r'[^\(\)\\]+', String),
+            (r'\\', String.Escape, 'escape'),
+            (r'\(', String, '#push'),
+            (r'\)', String, '#pop'),
+        ],
+
+        'escape': [
+            (r'([0-8]{3}|n|r|t|b|f|\\|\(|\)|)', String.Escape, '#pop'),
+        ],
+    }
+
+
+class AutohotkeyLexer(RegexLexer):
+    """
+    For `autohotkey <http://www.autohotkey.com/>`_ source code.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'autohotkey'
+    aliases = ['ahk']
+    filenames = ['*.ahk', '*.ahkl']
+    mimetypes = ['text/x-autohotkey']
+
+    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            (r'^\(', String, 'continuation'),
+            include('comments'),
+            (r'(^\s*)(\w+)(\s*)(=)',
+             bygroups(Text.Whitespace, Name, Text.Whitespace, Operator),
+             'command'),
+            (r'([\w#@$?\[\]]+)(\s*)(\()',
+             bygroups(Name.Function, Text.Whitespace, Punctuation),
+             'parameters'),
+            include('directives'),
+            include('labels'),
+            include('commands'),
+            include('expressions'),
+            include('numbers'),
+            include('literals'),
+            include('keynames'),
+            include('keywords'),
+        ],
+        'command': [
+            include('comments'),
+            include('whitespace'),
+            (r'^\(', String, 'continuation'),
+            (r'[^\n]*?(?=;*|$)', String, '#pop'),
+            include('numbers'),
+            include('literals'),
+        ],
+
+        'expressions': [
+            include('comments'),
+            include('whitespace'),
+            include('numbers'),
+            include('literals'),
+            (r'([]\w#@$?[]+)(\s*)(\()',
+             bygroups(Name.Function, Text.Whitespace, Punctuation),
+             'parameters'),
+            (r'A_\w+', Name.Builtin),
+            (r'%[]\w#@$?[]+?%', Name.Variable),
+            # blocks: if, else, function definitions
+            (r'{', Punctuation, 'block'),
+            # parameters in function calls
+            ],
+        'literals': [
+            (r'"', String, 'string'),
+            (r'A_\w+', Name.Builtin),
+            (r'%[]\w#@$?[]+?%', Name.Variable),
+            (r'[-~!%^&*+|?:<>/=]=?', Operator, 'expressions'),
+            (r'==', Operator, 'expressions'),
+            ('[{()},.%#`;]', Punctuation),
+            (r'\\', Punctuation),
+            include('keywords'),
+            (r'\w+', Text),
+            ],
+        'string': [
+            (r'"', String, '#pop'),
+            (r'""|`.', String.Escape),
+            (r'[^\`"\n]+', String), # all other characters
+        ],
+        'block': [
+            include('root'),
+            ('{', Punctuation, '#push'),
+            ('}', Punctuation, '#pop'),
+        ],
+        'parameters': [
+            (r'\)', Punctuation, '#pop'),
+            (r'\(', Punctuation, '#push'),
+            include('numbers'),
+            include('literals'),
+            include('whitespace'),
+        ],
+        'keywords': [
+            (r'(static|global|local)\b', Keyword.Type),
+            (r'(if|else|and|or)\b', Keyword.Reserved),
+            ],
+        'directives': [
+            (r'#\w+?\s', Keyword),
+            ],
+        'labels': [
+            # hotkeys and labels
+            # technically, hotkey names are limited to named keys and buttons
+            (r'(^\s*)([^:\s]+?:{1,2})', bygroups(Text.Whitespace, Name.Label)),
+             # hotstrings
+            (r'(^\s*)(::[]\w#@$?[]+?::)', bygroups(Text.Whitespace, Name.Label)),
+            ],
+        'comments': [
+            (r'^;+.*?$', Comment.Single),  # beginning of line comments
+            (r'(?<=\s);+.*?$', Comment.Single),    # end of line comments
+            (r'^/\*.*?\n\*/', Comment.Multiline),
+            (r'(?<!\n)/\*.*?\n\*/', Error),  # must be at start of line
+            ],
+        'whitespace': [
+            (r'[ \t]+', Text.Whitespace),
+            ],
+        'numbers': [
+            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eE][+-]?[0-9]+', Number.Float),
+            (r'0[0-7]+', Number.Oct),
+            (r'0[xX][a-fA-F0-9]+', Number.Hex),
+            (r'\d+L', Number.Integer.Long),
+            (r'\d+', Number.Integer)
+        ],
+        'continuation': [
+            (r'\n\)', Punctuation, '#pop'),
+            (r'\s[^\n\)]+', String),
+        ],
+        'keynames': [
+            (r'\[[^\]]+\]', Keyword, 'keynames')
+        ],
+        'commands': [
+            (r'(autotrim|blockinput|break|click|'
+             r'clipwait|continue|control|'
+             r'controlclick|controlfocus|controlget|'
+             r'controlgetfocus|controlgetpos|controlgettext|'
+             r'controlmove|controlsend|controlsendraw|'
+             r'controlsettext|coordmode|critical|'
+             r'detecthiddentext|detecthiddenwindows|'
+             r'dllcall|drive|'
+             r'driveget|drivespacefree|'
+             r'else|envadd|envdiv|'
+             r'envget|envmult|envset|'
+             r'envsub|envupdate|exit|'
+             r'exitapp|fileappend|filecopy|'
+             r'filecopydir|filecreatedir|filecreateshortcut|'
+             r'filedelete|filegetattrib|filegetshortcut|'
+             r'filegetsize|filegettime|filegetversion|'
+             r'fileinstall|filemove|filemovedir|'
+             r'fileread|filereadline|filerecycle|'
+             r'filerecycleempty|fileremovedir|fileselectfile|'
+             r'fileselectfolder|filesetattrib|filesettime|'
+             r'formattime|gosub|'
+             r'goto|groupactivate|groupadd|'
+             r'groupclose|groupdeactivate|gui|'
+             r'guicontrol|guicontrolget|hotkey|'
+             r'ifexist|ifgreater|ifgreaterorequal|'
+             r'ifinstring|ifless|iflessorequal|'
+             r'ifmsgbox|ifnotequal|ifnotexist|'
+             r'ifnotinstring|ifwinactive|ifwinexist|'
+             r'ifwinnotactive|ifwinnotexist|imagesearch|'
+             r'inidelete|iniread|iniwrite|'
+             r'input|inputbox|keyhistory|'
+             r'keywait|listhotkeys|listlines|'
+             r'listvars|loop|'
+             r'menu|mouseclick|mouseclickdrag|'
+             r'mousegetpos|mousemove|msgbox|'
+             r'onmessage|onexit|outputdebug|'
+             r'pixelgetcolor|pixelsearch|postmessage|'
+             r'process|progress|random|'
+             r'regexmatch|regexreplace|registercallback|'
+             r'regdelete|regread|regwrite|'
+             r'reload|repeat|return|'
+             r'run|runas|runwait|'
+             r'send|sendevent|sendinput|'
+             r'sendmessage|sendmode|sendplay|'
+             r'sendraw|setbatchlines|setcapslockstate|'
+             r'setcontroldelay|setdefaultmousespeed|setenv|'
+             r'setformat|setkeydelay|setmousedelay|'
+             r'setnumlockstate|setscrolllockstate|'
+             r'setstorecapslockmode|'
+             r'settimer|settitlematchmode|setwindelay|'
+             r'setworkingdir|shutdown|sleep|'
+             r'sort|soundbeep|soundget|'
+             r'soundgetwavevolume|soundplay|soundset|'
+             r'soundsetwavevolume|splashimage|splashtextoff|'
+             r'splashtexton|splitpath|statusbargettext|'
+             r'statusbarwait|stringcasesense|stringgetpos|'
+             r'stringleft|stringlen|stringlower|'
+             r'stringmid|stringreplace|stringright|'
+             r'stringsplit|stringtrimleft|stringtrimright|'
+             r'stringupper|suspend|sysget|'
+             r'thread|tooltip|transform|'
+             r'traytip|urldownloadtofile|while|'
+             r'varsetcapacity|'
+             r'winactivate|winactivatebottom|winclose|'
+             r'winget|wingetactivestats|wingetactivetitle|'
+             r'wingetclass|wingetpos|wingettext|'
+             r'wingettitle|winhide|winkill|'
+             r'winmaximize|winmenuselectitem|winminimize|'
+             r'winminimizeall|winminimizeallundo|winmove|'
+             r'winrestore|winset|winsettitle|'
+             r'winshow|winwait|winwaitactive|'
+             r'winwaitclose|winwaitnotactive|'
+             r'true|false|NULL)\b', Keyword, 'command'),
+            ],
+
+        }
+
+class MaqlLexer(RegexLexer):
+    """
+    Lexer for `GoodData MAQL <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
+    scripts.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'MAQL'
+    aliases = ['maql']
+    filenames = ['*.maql']
+    mimetypes = ['text/x-gooddata-maql','application/x-gooddata-maql']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            # IDENTITY
+            (r'IDENTIFIER\b', Name.Builtin),
+            # IDENTIFIER
+            (r'\{[^}]+\}', Name.Variable),
+            # NUMBER
+            (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
+            # STRING
+            (r'"', Literal.String, 'string-literal'),
+            #  RELATION
+            (r'\<\>|\!\=', Operator),
+            (r'\=|\>\=|\>|\<\=|\<', Operator),
+            # :=
+            (r'\:\=', Operator),
+            # OBJECT
+            (r'\[[^]]+\]', Name.Variable.Class),
+            # keywords
+            (r'(DIMENSIONS?|BOTTOM|METRIC|COUNT|OTHER|FACT|WITH|TOP|OR|'
+             r'ATTRIBUTE|CREATE|PARENT|FALSE|ROWS?|FROM|ALL|AS|PF|'
+             r'COLUMNS?|DEFINE|REPORT|LIMIT|TABLE|LIKE|AND|BY|'
+             r'BETWEEN|EXCEPT|SELECT|MATCH|WHERE|TRUE|FOR|IN|'
+             r'WITHOUT|FILTER|ALIAS|ORDER|FACT|WHEN|NOT|ON|'
+             r'KEYS|KEY|FULLSET|PRIMARY|LABELS|LABEL|VISUAL|'
+             r'TITLE|DESCRIPTION|FOLDER|ALTER|DROP|ADD|DATASET|'
+             r'DATATYPE|INT|BIGINT|DOUBLE|DATE|VARCHAR|DECIMAL|'
+             r'SYNCHRONIZE|TYPE|DEFAULT|ORDER|ASC|DESC|HYPERLINK|'
+             r'INCLUDE|TEMPLATE|MODIFY)\b', Keyword),
+            # FUNCNAME
+            (r'[a-zA-Z]\w*\b', Name.Function),
+            # Comments
+            (r'#.*', Comment.Single),
+            # Punctuation
+            (r'[,;\(\)]', Token.Punctuation),
+            # Space is not significant
+            (r'\s+', Text)
+        ],
+        'string-literal': [
+            (r'\\[tnrfbae"\\]', String.Escape),
+            (r'"', Literal.String, '#pop'),
+            (r'[^\\"]+', Literal.String)
+        ],
+    }
+
+
+class GoodDataCLLexer(RegexLexer):
+    """
+    Lexer for `GoodData-CL <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt>`_
+    script files.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'GoodData-CL'
+    aliases = ['gooddata-cl']
+    filenames = ['*.gdc']
+    mimetypes = ['text/x-gooddata-cl']
+
+    flags = re.IGNORECASE
+    tokens = {
+        'root': [
+            # Comments
+            (r'#.*', Comment.Single),
+            # Function call
+            (r'[a-zA-Z]\w*', Name.Function),
+            # Argument list
+            (r'\(', Token.Punctuation, 'args-list'),
+            # Punctuation
+            (r';', Token.Punctuation),
+            # Space is not significant
+            (r'\s+', Text)
+        ],
+        'args-list': [
+            (r'\)', Token.Punctuation, '#pop'),
+            (r',', Token.Punctuation),
+            (r'[a-zA-Z]\w*', Name.Variable),
+            (r'=', Operator),
+            (r'"', Literal.String, 'string-literal'),
+            (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
+            # Space is not significant
+            (r'\s', Text)
+        ],
+        'string-literal': [
+            (r'\\[tnrfbae"\\]', String.Escape),
+            (r'"', Literal.String, '#pop'),
+            (r'[^\\"]+', Literal.String)
+        ]
+    }
+
+
+class ProtoBufLexer(RegexLexer):
+    """
+    Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
+    definition files.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Protocol Buffer'
+    aliases = ['protobuf']
+    filenames = ['*.proto']
+
+    tokens = {
+        'root': [
+            (r'[ \t]+', Text),
+            (r'[,;{}\[\]\(\)]', Punctuation),
+            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+            (r'\b(import|option|optional|required|repeated|default|packed|'
+             r'ctype|extensions|to|max|rpc|returns)\b', Keyword),
+            (r'(int32|int64|uint32|uint64|sint32|sint64|'
+             r'fixed32|fixed64|sfixed32|sfixed64|'
+             r'float|double|bool|string|bytes)\b', Keyword.Type),
+            (r'(true|false)\b', Keyword.Constant),
+            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
+            (r'(message|extend)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'message'),
+            (r'(enum|group|service)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'type'),
+            (r'\".*\"', String),
+            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+            (r'(\-?(inf|nan))', Number.Float),
+            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+            (r'0[0-7]+[LlUu]*', Number.Oct),
+            (r'\d+[LlUu]*', Number.Integer),
+            (r'[-+=]', Operator),
+            (r'([a-zA-Z_][a-zA-Z0-9_\.]*)([ \t]*)(=)',
+             bygroups(Name.Attribute, Text, Operator)),
+            ('[a-zA-Z_][a-zA-Z0-9_\.]*', Name),
+        ],
+        'package': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Namespace, '#pop')
+        ],
+        'message': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+        ],
+        'type': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name, '#pop')
+        ],
+    }
+
+
+class HybrisLexer(RegexLexer):
+    """
+    For `Hybris <http://www.hybris-lang.org>`_ source code.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Hybris'
+    aliases = ['hybris', 'hy']
+    filenames = ['*.hy', '*.hyb']
+    mimetypes = ['text/x-hybris', 'application/x-hybris']
+
+    flags = re.MULTILINE | re.DOTALL
+
+    tokens = {
+        'root': [
+            # method names
+            (r'^(\s*(?:function|method|operator\s+)+?)'
+             r'([a-zA-Z_][a-zA-Z0-9_]*)'
+             r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
+            (r'[^\S\n]+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+            (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
+             r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
+            (r'(extends|private|protected|public|static|throws|function|method|'
+             r'operator)\b', Keyword.Declaration),
+            (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
+             r'__INC_PATH__)\b', Keyword.Constant),
+            (r'(class|struct)(\s+)',
+             bygroups(Keyword.Declaration, Text), 'class'),
+            (r'(import|include)(\s+)',
+             bygroups(Keyword.Namespace, Text), 'import'),
+            (r'(gc_collect|gc_mm_items|gc_mm_usage|gc_collect_threshold|'
+             r'urlencode|urldecode|base64encode|base64decode|sha1|crc32|sha2|'
+             r'md5|md5_file|acos|asin|atan|atan2|ceil|cos|cosh|exp|fabs|floor|'
+             r'fmod|log|log10|pow|sin|sinh|sqrt|tan|tanh|isint|isfloat|ischar|'
+             r'isstring|isarray|ismap|isalias|typeof|sizeof|toint|tostring|'
+             r'fromxml|toxml|binary|pack|load|eval|var_names|var_values|'
+             r'user_functions|dyn_functions|methods|call|call_method|mknod|'
+             r'mkfifo|mount|umount2|umount|ticks|usleep|sleep|time|strtime|'
+             r'strdate|dllopen|dlllink|dllcall|dllcall_argv|dllclose|env|exec|'
+             r'fork|getpid|wait|popen|pclose|exit|kill|pthread_create|'
+             r'pthread_create_argv|pthread_exit|pthread_join|pthread_kill|'
+             r'smtp_send|http_get|http_post|http_download|socket|bind|listen|'
+             r'accept|getsockname|getpeername|settimeout|connect|server|recv|'
+             r'send|close|print|println|printf|input|readline|serial_open|'
+             r'serial_fcntl|serial_get_attr|serial_get_ispeed|serial_get_ospeed|'
+             r'serial_set_attr|serial_set_ispeed|serial_set_ospeed|serial_write|'
+             r'serial_read|serial_close|xml_load|xml_parse|fopen|fseek|ftell|'
+             r'fsize|fread|fwrite|fgets|fclose|file|readdir|pcre_replace|size|'
+             r'pop|unmap|has|keys|values|length|find|substr|replace|split|trim|'
+             r'remove|contains|join)\b', Name.Builtin),
+            (r'(MethodReference|Runner|Dll|Thread|Pipe|Process|Runnable|'
+             r'CGI|ClientSocket|Socket|ServerSocket|File|Console|Directory|'
+             r'Exception)\b', Keyword.Type),
+            (r'"(\\\\|\\"|[^"])*"', String),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
+             bygroups(Operator, Name.Attribute)),
+            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
+            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'0x[0-9a-f]+', Number.Hex),
+            (r'[0-9]+L?', Number.Integer),
+            (r'\n', Text),
+        ],
+        'class': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+        ],
+        'import': [
+            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+        ],
+    }
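A minimal sketch of driving the lexers added above through the standard pygments
entry points, assuming they are registered in the lexer mapping as in upstream 1.4
(the MAQL sample text is illustrative only; inside MoinMoin the same modules live
under MoinMoin.support.pygments):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import MaqlLexer

    # Render a made-up MAQL metric definition as HTML.
    maql = 'SELECT COUNT({attr.invoice}) BY {attr.region} WHERE {attr.year} = 2011;'
    print(highlight(maql, MaqlLexer(), HtmlFormatter()))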
--- a/MoinMoin/support/pygments/lexers/templates.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/templates.py	Thu Dec 01 02:26:16 2011 +0100
@@ -13,7 +13,7 @@
 
 from pygments.lexers.web import \
      PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
-from pygments.lexers.agile import PythonLexer
+from pygments.lexers.agile import PythonLexer, PerlLexer
 from pygments.lexers.compiled import JavaLexer
 from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
      include, using, this
@@ -30,12 +30,14 @@
            'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
            'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
            'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
-           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MakoLexer',
+           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
            'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
            'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
            'CheetahXmlLexer', 'CheetahJavascriptLexer',
            'EvoqueLexer', 'EvoqueHtmlLexer', 'EvoqueXmlLexer',
-           'ColdfusionLexer', 'ColdfusionHtmlLexer']
+           'ColdfusionLexer', 'ColdfusionHtmlLexer',
+           'VelocityLexer', 'VelocityHtmlLexer', 'VelocityXmlLexer',
+           'SspLexer']
 
 
 class ErbLexer(Lexer):
@@ -188,6 +190,121 @@
         return rv
 
 
+class VelocityLexer(RegexLexer):
+    """
+    Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
+
+    Just highlights Velocity directives and variable references; other
+    data is left untouched by the lexer.
+    """
+
+    name = 'Velocity'
+    aliases = ['velocity']
+    filenames = ['*.vm','*.fhtml']
+
+    flags = re.MULTILINE | re.DOTALL
+
+    identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+    tokens = {
+        'root': [
+            (r'[^{#$]+', Other),
+            (r'(#)(\*.*?\*)(#)',
+             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
+            (r'(##)(.*?$)',
+             bygroups(Comment.Preproc, Comment)),
+            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
+             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
+             'directiveparams'),
+            (r'(#\{?)(' + identifier + r')(\}|\b)',
+             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
+            (r'\$\{?', Punctuation, 'variable')
+        ],
+        'variable': [
+            (identifier, Name.Variable),
+            (r'\(', Punctuation, 'funcparams'),
+            (r'(\.)(' + identifier + r')', bygroups(Punctuation, Name.Variable), '#push'),
+            (r'\}', Punctuation, '#pop'),
+            (r'', Other, '#pop')
+        ],
+        'directiveparams': [
+            (r'(&&|\|\||==?|!=?|[-<>+*%&\|\^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b', Operator),
+            (r'\[', Operator, 'rangeoperator'),
+            (r'\b' + identifier + r'\b', Name.Function),
+            include('funcparams')
+        ],
+        'rangeoperator': [
+            (r'\.\.', Operator),
+            include('funcparams'),
+            (r'\]', Operator, '#pop')
+        ],
+        'funcparams': [
+            (r'\$\{?', Punctuation, 'variable'),
+            (r'\s+', Text),
+            (r',', Punctuation),
+            (r'"(\\\\|\\"|[^"])*"', String.Double),
+            (r"'(\\\\|\\'|[^'])*'", String.Single),
+            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+            (r"\b[0-9]+\b", Number),
+            (r'(true|false|null)\b', Keyword.Constant),
+            (r'\(', Punctuation, '#push'),
+            (r'\)', Punctuation, '#pop')
+        ]
+    }
+
+    def analyse_text(text):
+        rv = 0.0
+        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
+            rv += 0.25
+        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
+            rv += 0.15
+        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
+            rv += 0.15
+        if re.search(r'\$\{?[a-zA-Z_][a-zA-Z0-9_]*(\([^)]*\))?(\.[a-zA-Z0-9_]+(\([^)]*\))?)*\}?', text):
+            rv += 0.01
+        return rv
+
+
+class VelocityHtmlLexer(DelegatingLexer):
+    """
+    Subclass of the `VelocityLexer` that highlights unlexed data
+    with the `HtmlLexer`.
+
+    """
+
+    name = 'HTML+Velocity'
+    aliases = ['html+velocity']
+    alias_filenames = ['*.html','*.fhtml']
+    mimetypes = ['text/html+velocity']
+
+    def __init__(self, **options):
+        super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
+                                              **options)
+
+
+class VelocityXmlLexer(DelegatingLexer):
+    """
+    Subclass of the `VelocityLexer` that highlights unlexed data
+    with the `XmlLexer`.
+
+    """
+
+    name = 'XML+Velocity'
+    aliases = ['xml+velocity']
+    alias_filenames = ['*.xml','*.vm']
+    mimetypes = ['application/xml+velocity']
+
+    def __init__(self, **options):
+        super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
+                                               **options)
+
+    def analyse_text(text):
+        rv = VelocityLexer.analyse_text(text) - 0.01
+        if looks_like_xml(text):
+            rv += 0.5
+        return rv
+
+
 class DjangoLexer(RegexLexer):
     """
     Generic `django <http://www.djangoproject.com/documentation/templates/>`_
@@ -239,7 +356,7 @@
              r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
              Keyword),
             (r'(loop|block|super|forloop)\b', Name.Builtin),
-            (r'[a-zA-Z][a-zA-Z0-9_]*', Name.Variable),
+            (r'[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable),
             (r'\.[a-zA-Z0-9_]+', Name.Variable),
             (r':?"(\\\\|\\"|[^"])*"', String.Double),
             (r":?'(\\\\|\\'|[^'])*'", String.Single),
@@ -389,6 +506,61 @@
                                              **options)
 
 
+class MasonLexer(RegexLexer):
+    """
+    Generic `mason templates`_ lexer. Based on the Myghty lexer. Code that isn't
+    Mason markup is HTML.
+
+    .. _mason templates: http://www.masonhq.com/
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Mason'
+    aliases = ['mason']
+    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
+    mimetypes = ['application/x-mason']
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'(<%doc>)(.*?)(</%doc>)(?s)',
+             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+            (r'(<%(def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, None, Text, Name.Function, Name.Tag,
+                      using(this), Name.Tag)),
+            (r'(<%(\w+))(.*?)(>)(.*?)(</%\2\s*>)(?s)',
+             bygroups(Name.Tag, None, Name.Function, Name.Tag,
+                      using(PerlLexer), Name.Tag)),
+            (r'(<&[^|])(.*?)(,.*?)?(&>)(?s)',
+             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+            (r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
+             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+            (r'</&>', Name.Tag),
+            (r'(<%!?)(.*?)(%>)(?s)',
+             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
+            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
+            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
+             bygroups(Name.Tag, using(PerlLexer), Other)),
+            (r"""(?sx)
+                 (.+?)               # anything, followed by:
+                 (?:
+                  (?<=\n)(?=[%#]) |  # an eval or comment line
+                  (?=</?[%&]) |      # a substitution or block or
+                                     # call start or end
+                                     # - don't consume
+                  (\\\n) |           # an escaped newline
+                  \Z                 # end of string
+                 )""", bygroups(using(HtmlLexer), Operator)),
+        ]
+    }
+
+    def analyse_text(text):
+        rv = 0.0
+        if re.search('<&', text) is not None:
+            rv = 1.0
+        return rv
+
+
 class MakoLexer(RegexLexer):
     """
     Generic `mako templates`_ lexer. Code that isn't Mako
@@ -1376,7 +1548,7 @@
     """
     Coldfusion markup in html
     """
-    name = 'Coldufsion HTML'
+    name = 'Coldfusion HTML'
     aliases = ['cfm']
     filenames = ['*.cfm', '*.cfml', '*.cfc']
     mimetypes = ['application/x-coldfusion']
@@ -1385,3 +1557,27 @@
         super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
                                                   **options)
 
+
+class SspLexer(DelegatingLexer):
+    """
+    Lexer for Scalate Server Pages.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'Scalate Server Page'
+    aliases = ['ssp']
+    filenames = ['*.ssp']
+    mimetypes = ['application/x-ssp']
+
+    def __init__(self, **options):
+        super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
+
+    def analyse_text(text):
+        rv = 0.0
+        if re.search(r'val \w+\s*:', text):
+            rv += 0.6
+        if looks_like_xml(text):
+            rv += 0.2
+        if '<%' in text and '%>' in text:
+            rv += 0.1
+        return rv
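The analyse_text() hooks on the new template lexers feed pygments' content-based
guessing; a rough sketch of the scores they return for purely illustrative
snippets (values are heuristics clamped to [0.0, 1.0]; the highest score wins in
guess_lexer()):

    from pygments.lexers.templates import VelocityLexer, MasonLexer, SspLexer

    print(VelocityLexer.analyse_text('#if($user) hi #end ${user.name}'))  # ~0.16
    print(MasonLexer.analyse_text('<& /widgets/header &>'))               # 1.0
    print(SspLexer.analyse_text('<% val name: String = "x" %>'))          # ~0.7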
--- a/MoinMoin/support/pygments/lexers/text.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/text.py	Thu Dec 01 02:26:16 2011 +0100
@@ -19,7 +19,7 @@
 from pygments.util import get_bool_opt
 from pygments.lexers.other import BashLexer
 
-__all__ = ['IniLexer', 'SourcesListLexer', 'BaseMakefileLexer',
+__all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer',
            'MakefileLexer', 'DiffLexer', 'IrcLogsLexer', 'TexLexer',
            'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer',
            'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
@@ -34,7 +34,7 @@
 
     name = 'INI'
     aliases = ['ini', 'cfg']
-    filenames = ['*.ini', '*.cfg', '*.properties']
+    filenames = ['*.ini', '*.cfg']
     mimetypes = ['text/x-ini']
 
     tokens = {
@@ -42,7 +42,7 @@
             (r'\s+', Text),
             (r'[;#].*?$', Comment),
             (r'\[.*?\]$', Keyword),
-            (r'(.*?)([ \t]*)(=)([ \t]*)(.*?)$',
+            (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
              bygroups(Name.Attribute, Text, Operator, Text, String))
         ]
     }
@@ -54,6 +54,28 @@
         return text[0] == '[' and text[npos-1] == ']'
 
 
+class PropertiesLexer(RegexLexer):
+    """
+    Lexer for configuration files in Java's properties format.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Properties'
+    aliases = ['properties']
+    filenames = ['*.properties']
+    mimetypes = ['text/x-java-properties']
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'(?:[;#]|//).*$', Comment),
+            (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
+             bygroups(Name.Attribute, Text, Operator, Text, String)),
+        ],
+    }
+
+
 class SourcesListLexer(RegexLexer):
     """
     Lexer that highlights debian sources.list files.
@@ -830,7 +852,7 @@
             (r'^#:\s.*?$', Keyword.Declaration),
             #(r'^#$', Comment),
             (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
-            (r'^(")([\w-]*:)(.*")$',
+            (r'^(")([A-Za-z-]+:)(.*")$',
              bygroups(String, Name.Property, String)),
             (r'^".*"$', String),
             (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$',
@@ -1524,7 +1546,7 @@
     """
     name = 'CMake'
     aliases = ['cmake']
-    filenames = ['*.cmake']
+    filenames = ['*.cmake', 'CMakeLists.txt']
     mimetypes = ['text/x-cmake']
 
     tokens = {
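A consequence of the filename changes above, sketched with the standard lexer
lookup helper (the file names are illustrative): *.properties now resolves to the
new PropertiesLexer instead of the INI lexer, while *.cfg stays with INI.

    from pygments.lexers import get_lexer_for_filename

    print(get_lexer_for_filename('log4j.properties').name)  # 'Properties'
    print(get_lexer_for_filename('settings.cfg').name)      # 'INI'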
--- a/MoinMoin/support/pygments/lexers/web.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/lexers/web.py	Thu Dec 01 02:26:16 2011 +0100
@@ -10,20 +10,23 @@
 """
 
 import re
+import copy
 
 from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
      include, this
-from pygments.token import \
-     Text, Comment, Operator, Keyword, Name, String, Number, Other, Punctuation
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+     Number, Other, Punctuation, Literal
 from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
                           html_doctype_matches
 from pygments.lexers.agile import RubyLexer
+from pygments.lexers.compiled import ScalaLexer
 
 
 __all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'CssLexer',
            'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
-           'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer',
-           'ObjectiveJLexer', 'CoffeeScriptLexer']
+           'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
+           'ObjectiveJLexer', 'CoffeeScriptLexer', 'DuelLexer', 'ScamlLexer',
+           'JadeLexer', 'XQueryLexer']
 
 
 class JavascriptLexer(RegexLexer):
@@ -34,7 +37,8 @@
     name = 'JavaScript'
     aliases = ['js', 'javascript']
     filenames = ['*.js']
-    mimetypes = ['application/x-javascript', 'text/x-javascript', 'text/javascript']
+    mimetypes = ['application/javascript', 'application/x-javascript',
+                 'text/x-javascript', 'text/javascript']
 
     flags = re.DOTALL
     tokens = {
@@ -378,7 +382,7 @@
             (r'\!important', Comment.Preproc),
             (r'/\*(?:.|\n)*?\*/', Comment),
             (r'\#[a-zA-Z0-9]{1,6}', Number),
-            (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex)', Number),
+            (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex|s)\b', Number),
             (r'-?[0-9]+', Number),
             (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
             (r'[\[\]();]+', Punctuation),
@@ -748,8 +752,11 @@
             (r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
             (r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
             (r'[\\a-zA-Z_][\\a-zA-Z0-9_]*', Name.Other),
-            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
-             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+            (r'\d+[eE][+-]?[0-9]+', Number.Float),
+            (r'0[0-7]+', Number.Oct),
+            (r'0[xX][a-fA-F0-9]+', Number.Hex),
+            (r'\d+', Number.Integer),
             (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
             (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
             (r'"', String.Double, 'string'),
@@ -763,7 +770,7 @@
         'string': [
             (r'"', String.Double, '#pop'),
             (r'[^{$"\\]+', String.Double),
-            (r'\\([nrt\"$]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
+            (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
             (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
              String.Interpol),
             (r'(\{\$\{)(.*?)(\}\})',
@@ -1217,6 +1224,10 @@
     # which is ignored and used to wrap long lines.
     # To accommodate this, use this custom faux dot instead.
     _dot = r'(?: \|\n(?=.* \|)|.)'
+
+    # In certain places, a comma at the end of the line
+    # allows line wrapping as well.
+    _comma_dot = r'(?:,\s*\n|' + _dot + ')'
     tokens = {
         'root': [
             (r'[ \t]*\n', Text),
@@ -1230,7 +1241,7 @@
 
         'eval-or-plain': [
             (r'[&!]?==', Punctuation, 'plain'),
-            (r'([&!]?[=~])(' + _dot + '*\n)',
+            (r'([&!]?[=~])(' + _comma_dot + '*\n)',
              bygroups(Punctuation, using(RubyLexer)),
              'root'),
             (r'', Text, 'plain'),
@@ -1247,7 +1258,7 @@
              '#pop'),
             (r'-#' + _dot + '*\n', _starts_block(Comment.Preproc,
                                                  'haml-comment-block'), '#pop'),
-            (r'(-)(' + _dot + '*\n)',
+            (r'(-)(' + _comma_dot + '*\n)',
              bygroups(Punctuation, using(RubyLexer)),
              '#pop'),
             (r':' + _dot + '*\n', _starts_block(Name.Decorator, 'filter-block'),
@@ -1307,6 +1318,172 @@
     }
 
 
+common_sass_tokens = {
+    'value': [
+        (r'[ \t]+', Text),
+        (r'[!$][\w-]+', Name.Variable),
+        (r'url\(', String.Other, 'string-url'),
+        (r'[a-z_-][\w-]*(?=\()', Name.Function),
+        (r'(azimuth|background-attachment|background-color|'
+         r'background-image|background-position|background-repeat|'
+         r'background|border-bottom-color|border-bottom-style|'
+         r'border-bottom-width|border-left-color|border-left-style|'
+         r'border-left-width|border-right|border-right-color|'
+         r'border-right-style|border-right-width|border-top-color|'
+         r'border-top-style|border-top-width|border-bottom|'
+         r'border-collapse|border-left|border-width|border-color|'
+         r'border-spacing|border-style|border-top|border|caption-side|'
+         r'clear|clip|color|content|counter-increment|counter-reset|'
+         r'cue-after|cue-before|cue|cursor|direction|display|'
+         r'elevation|empty-cells|float|font-family|font-size|'
+         r'font-size-adjust|font-stretch|font-style|font-variant|'
+         r'font-weight|font|height|letter-spacing|line-height|'
+         r'list-style-type|list-style-image|list-style-position|'
+         r'list-style|margin-bottom|margin-left|margin-right|'
+         r'margin-top|margin|marker-offset|marks|max-height|max-width|'
+         r'min-height|min-width|opacity|orphans|outline|outline-color|'
+         r'outline-style|outline-width|overflow|padding-bottom|'
+         r'padding-left|padding-right|padding-top|padding|page|'
+         r'page-break-after|page-break-before|page-break-inside|'
+         r'pause-after|pause-before|pause|pitch|pitch-range|'
+         r'play-during|position|quotes|richness|right|size|'
+         r'speak-header|speak-numeral|speak-punctuation|speak|'
+         r'speech-rate|stress|table-layout|text-align|text-decoration|'
+         r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
+         r'vertical-align|visibility|voice-family|volume|white-space|'
+         r'widows|width|word-spacing|z-index|bottom|left|'
+         r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
+         r'behind|below|bidi-override|blink|block|bold|bolder|both|'
+         r'capitalize|center-left|center-right|center|circle|'
+         r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
+         r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
+         r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
+         r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
+         r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
+         r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
+         r'inherit|inline-table|inline|inset|inside|invert|italic|'
+         r'justify|katakana-iroha|katakana|landscape|larger|large|'
+         r'left-side|leftwards|level|lighter|line-through|list-item|'
+         r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
+         r'lower|low|medium|message-box|middle|mix|monospace|'
+         r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
+         r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
+         r'open-quote|outset|outside|overline|pointer|portrait|px|'
+         r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
+         r'rightwards|s-resize|sans-serif|scroll|se-resize|'
+         r'semi-condensed|semi-expanded|separate|serif|show|silent|'
+         r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
+         r'spell-out|square|static|status-bar|super|sw-resize|'
+         r'table-caption|table-cell|table-column|table-column-group|'
+         r'table-footer-group|table-header-group|table-row|'
+         r'table-row-group|text|text-bottom|text-top|thick|thin|'
+         r'transparent|ultra-condensed|ultra-expanded|underline|'
+         r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
+         r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
+         r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
+        (r'(indigo|gold|firebrick|indianred|darkolivegreen|'
+         r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
+         r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
+         r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
+         r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
+         r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
+         r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
+         r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
+         r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
+         r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
+         r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
+         r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
+         r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
+         r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
+         r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
+         r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
+         r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
+         r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
+         r'plum|darkgoldenrod|sandybrown|magenta|tan|'
+         r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
+         r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
+         r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
+         r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
+         r'lightyellow|lavenderblush|linen|mediumaquamarine|'
+         r'blueviolet|peachpuff)\b', Name.Entity),
+        (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
+         r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
+        (r'\!(important|default)', Name.Exception),
+        (r'(true|false)', Name.Pseudo),
+        (r'(and|or|not)', Operator.Word),
+        (r'/\*', Comment.Multiline, 'inline-comment'),
+        (r'//[^\n]*', Comment.Single),
+        (r'\#[a-z0-9]{1,6}', Number.Hex),
+        (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
+        (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
+        (r'#{', String.Interpol, 'interpolation'),
+        (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
+        (r'[\[\]()]+', Punctuation),
+        (r'"', String.Double, 'string-double'),
+        (r"'", String.Single, 'string-single'),
+        (r'[a-z_-][\w-]*', Name),
+    ],
+
+    'interpolation': [
+        (r'\}', String.Interpol, '#pop'),
+        include('value'),
+    ],
+
+    'selector': [
+        (r'[ \t]+', Text),
+        (r'\:', Name.Decorator, 'pseudo-class'),
+        (r'\.', Name.Class, 'class'),
+        (r'\#', Name.Namespace, 'id'),
+        (r'[a-zA-Z0-9_-]+', Name.Tag),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'&', Keyword),
+        (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
+        (r'"', String.Double, 'string-double'),
+        (r"'", String.Single, 'string-single'),
+    ],
+
+    'string-double': [
+        (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'"', String.Double, '#pop'),
+    ],
+
+    'string-single': [
+        (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r"'", String.Double, '#pop'),
+    ],
+
+    'string-url': [
+        (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'\)', String.Other, '#pop'),
+    ],
+
+    'pseudo-class': [
+        (r'[\w-]+', Name.Decorator),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'', Text, '#pop'),
+    ],
+
+    'class': [
+        (r'[\w-]+', Name.Class),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'', Text, '#pop'),
+    ],
+
+    'id': [
+        (r'[\w-]+', Name.Namespace),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r'', Text, '#pop'),
+    ],
+
+    'for': [
+        (r'(from|to|through)', Operator.Word),
+        include('value'),
+    ],
+}
+
 class SassLexer(ExtendedRegexLexer):
     """
     For Sass stylesheets.
@@ -1333,14 +1510,17 @@
              'root'),
             (r'@import', Keyword, 'import'),
             (r'@for', Keyword, 'for'),
-            (r'@(debug|if|while)', Keyword, 'script'),
+            (r'@(debug|warn|if|while)', Keyword, 'value'),
+            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+            (r'@extend', Keyword, 'selector'),
             (r'@[a-z0-9_-]+', Keyword, 'selector'),
-            (r'=[\w-]+', Name.Function, 'script'),
-            (r'\+[\w-]+', Name.Decorator, 'script'),
-            (r'(![a-z_]\w*)([ \t]*(?:\|\|)?=)',
-             bygroups(Name.Variable, Operator), 'script'),
+            (r'=[\w-]+', Name.Function, 'value'),
+            (r'\+[\w-]+', Name.Decorator, 'value'),
+            (r'([!$][\w-]*\w)([ \t]*(?:(?:\|\|)?=|:))',
+             bygroups(Name.Variable, Operator), 'value'),
             (r':', Name.Attribute, 'old-style-attr'),
-            (r'(?=[^\s:"\[]+\s*[=:]([ \t]|$))', Name.Attribute, 'new-style-attr'),
+            (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
             (r'', Text, 'selector'),
         ],
 
@@ -1360,210 +1540,83 @@
             (r'\n', Text, 'root'),
         ],
 
-        'for': [
-            (r'(from|to|through)', Operator.Word),
-            include('script'),
-        ],
-
         'old-style-attr': [
             (r'[^\s:="\[]+', Name.Attribute),
             (r'#{', String.Interpol, 'interpolation'),
-            (r'[ \t]*=', Operator, 'script'),
+            (r'[ \t]*=', Operator, 'value'),
             (r'', Text, 'value'),
         ],
 
         'new-style-attr': [
             (r'[^\s:="\[]+', Name.Attribute),
             (r'#{', String.Interpol, 'interpolation'),
-            (r'[ \t]*=', Operator, 'script'),
-            (r':', Name.Attribute, 'value'),
-        ],
-
-        'value': [
-            (r'[ \t]+', Text),
-            (r'url\(', String.Other, 'string-url'),
-            (r'(azimuth|background-attachment|background-color|'
-             r'background-image|background-position|background-repeat|'
-             r'background|border-bottom-color|border-bottom-style|'
-             r'border-bottom-width|border-left-color|border-left-style|'
-             r'border-left-width|border-right|border-right-color|'
-             r'border-right-style|border-right-width|border-top-color|'
-             r'border-top-style|border-top-width|border-bottom|'
-             r'border-collapse|border-left|border-width|border-color|'
-             r'border-spacing|border-style|border-top|border|caption-side|'
-             r'clear|clip|color|content|counter-increment|counter-reset|'
-             r'cue-after|cue-before|cue|cursor|direction|display|'
-             r'elevation|empty-cells|float|font-family|font-size|'
-             r'font-size-adjust|font-stretch|font-style|font-variant|'
-             r'font-weight|font|height|letter-spacing|line-height|'
-             r'list-style-type|list-style-image|list-style-position|'
-             r'list-style|margin-bottom|margin-left|margin-right|'
-             r'margin-top|margin|marker-offset|marks|max-height|max-width|'
-             r'min-height|min-width|opacity|orphans|outline|outline-color|'
-             r'outline-style|outline-width|overflow|padding-bottom|'
-             r'padding-left|padding-right|padding-top|padding|page|'
-             r'page-break-after|page-break-before|page-break-inside|'
-             r'pause-after|pause-before|pause|pitch|pitch-range|'
-             r'play-during|position|quotes|richness|right|size|'
-             r'speak-header|speak-numeral|speak-punctuation|speak|'
-             r'speech-rate|stress|table-layout|text-align|text-decoration|'
-             r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
-             r'vertical-align|visibility|voice-family|volume|white-space|'
-             r'widows|width|word-spacing|z-index|bottom|left|'
-             r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
-             r'behind|below|bidi-override|blink|block|bold|bolder|both|'
-             r'capitalize|center-left|center-right|center|circle|'
-             r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
-             r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
-             r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
-             r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
-             r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
-             r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
-             r'inherit|inline-table|inline|inset|inside|invert|italic|'
-             r'justify|katakana-iroha|katakana|landscape|larger|large|'
-             r'left-side|leftwards|level|lighter|line-through|list-item|'
-             r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
-             r'lower|low|medium|message-box|middle|mix|monospace|'
-             r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
-             r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
-             r'open-quote|outset|outside|overline|pointer|portrait|px|'
-             r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
-             r'rightwards|s-resize|sans-serif|scroll|se-resize|'
-             r'semi-condensed|semi-expanded|separate|serif|show|silent|'
-             r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
-             r'spell-out|square|static|status-bar|super|sw-resize|'
-             r'table-caption|table-cell|table-column|table-column-group|'
-             r'table-footer-group|table-header-group|table-row|'
-             r'table-row-group|text|text-bottom|text-top|thick|thin|'
-             r'transparent|ultra-condensed|ultra-expanded|underline|'
-             r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
-             r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
-             r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
-            (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
-             r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
-             r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
-             r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
-             r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
-             r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
-             r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
-             r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
-             r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
-             r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
-             r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
-             r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
-             r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
-             r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
-             r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
-             r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
-             r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
-             r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
-             r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
-             r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
-             r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
-             r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
-             r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
-             r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
-             r'blueviolet|peachpuff)\b', Name.Entity),
-            (r'\!important', Name.Exception),
-            (r'/\*', Comment, 'inline-comment'),
-            (r'\#[a-z0-9]{1,6}', Number.Hex),
-            (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
-            (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
-            (r'#{', String.Interpol, 'interpolation'),
-            (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
-            (r'[\[\]();]+', Punctuation),
-            (r'"', String.Double, 'string-double'),
-            (r"'", String.Single, 'string-single'),
-            (r'[a-z][\w-]*', Name),
-            (r'\n', Text, 'root'),
-        ],
-
-        'script': [
-            (r'[ \t]+', Text),
-            (r'![\w_]+', Name.Variable),
-            (r'[+\-*/%=(),!><]', Operator),
-            (r'"', String.Double, 'string-double'),
-            (r"'", String.Single, 'string-single'),
-            (r'\#[a-z0-9]{1,6}', Number.Hex),
-            (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
-            (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
-            (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
-             r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
-            (r'(true|false)', Name.Pseudo),
-            (r'(and|or|not)', Operator.Word),
-            (r'(\\.|[^\s\\+*\/%(),=!])+(?=[ \t]*\()', Name.Function),
-            (r'(\\.|[^\s\\+*\/%(),=!])+', Name),
-            (r'\n', Text, 'root'),
-        ],
-
-        'interpolation': [
-            (r'\}', String.Interpol, '#pop'),
-            include('script'),
-        ],
-
-        'selector': [
-            (r'[ \t]+', Text),
-            (r'\:', Name.Decorator, 'pseudo-class'),
-            (r'\.', Name.Class, 'class'),
-            (r'\#', Name.Namespace, 'id'),
-            (r'[a-zA-Z0-9_-]+', Name.Tag),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'&', Keyword),
-            (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
-            (r'"', String.Double, 'string-double'),
-            (r"'", String.Single, 'string-single'),
-            (r'\n', Text, 'root'),
-        ],
-
-        'string-double': [
-            (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'"', String.Double, '#pop'),
-        ],
-
-        'string-single': [
-            (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r"'", String.Double, '#pop'),
-        ],
-
-        'string-url': [
-            (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'\)', String.Other, '#pop'),
+            (r'[ \t]*[=:]', Operator, 'value'),
         ],
 
         'inline-comment': [
-            (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment),
+            (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
             (r'#\{', String.Interpol, 'interpolation'),
             (r"\*/", Comment, '#pop'),
         ],
+    }
+    for group, common in common_sass_tokens.iteritems():
+        tokens[group] = copy.copy(common)
+    tokens['value'].append((r'\n', Text, 'root'))
+    tokens['selector'].append((r'\n', Text, 'root'))
 
-        'pseudo-class': [
-            (r'[\w-]+', Name.Decorator),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'', Text, '#pop'),
+
+class ScssLexer(RegexLexer):
+    """
+    For SCSS stylesheets.
+    """
+
+    name = 'SCSS'
+    aliases = ['scss']
+    filenames = ['*.scss']
+    mimetypes = ['text/x-scss']
+
+    flags = re.IGNORECASE | re.DOTALL
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'//.*?\n', Comment.Single),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r'@import', Keyword, 'value'),
+            (r'@for', Keyword, 'for'),
+            (r'@(debug|warn|if|while)', Keyword, 'value'),
+            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+            (r'@extend', Keyword, 'selector'),
+            (r'@[a-z0-9_-]+', Keyword, 'selector'),
+            (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
+            (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
+            (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
+            (r'', Text, 'selector'),
         ],
 
-        'class': [
-            (r'[\w-]+', Name.Class),
-            (r'#\{', String.Interpol, 'interpolation'),
-            (r'', Text, '#pop'),
+        'attr': [
+            (r'[^\s:="\[]+', Name.Attribute),
+            (r'#{', String.Interpol, 'interpolation'),
+            (r'[ \t]*:', Operator, 'value'),
         ],
 
-        'id': [
-            (r'[\w-]+', Name.Namespace),
+        'inline-comment': [
+            (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
             (r'#\{', String.Interpol, 'interpolation'),
-            (r'', Text, '#pop'),
+            (r"\*/", Comment, '#pop'),
         ],
     }
+    for group, common in common_sass_tokens.iteritems():
+        tokens[group] = copy.copy(common)
+    tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
+    tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
 
 
 class CoffeeScriptLexer(RegexLexer):
     """
     For `CoffeeScript`_ source code.
 
-    .. _CoffeeScript: http://jashkenas.github.com/coffee-script/
+    .. _CoffeeScript: http://coffeescript.org
 
     *New in Pygments 1.3.*
     """
@@ -1592,24 +1645,27 @@
         'root': [
             (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
             include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
+            (r'\+\+|--|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|=|'
              r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*`%&\|\^/])=?',
              Operator, 'slashstartsregex'),
+            (r'\([^()]*\)\s*->', Name.Function),
             (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
             (r'[})\].]', Punctuation),
             (r'(for|in|of|while|break|return|continue|switch|when|then|if|else|'
              r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
-             r'extends|this)\b', Keyword, 'slashstartsregex'),
+             r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
             (r'(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\b',
              Keyword.Constant),
             (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
              r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
              r'decodeURIComponent|encodeURI|encodeURIComponent|'
-             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
-             r'window)\b', Name.Builtin),
-            (r'[$a-zA-Z_][a-zA-Z0-9_\.:]*:\s', Name.Variable,
-             'slashstartsregex'),
-            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
+             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+             Name.Builtin),
+            (r'[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable,
+              'slashstartsregex'),
+            (r'@[$a-zA-Z_][a-zA-Z0-9_\.:]*\s*[:=]\s', Name.Variable.Instance,
+              'slashstartsregex'),
+            (r'@?[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other, 'slashstartsregex'),
             (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
             (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'[0-9]+', Number.Integer),
@@ -1617,3 +1673,900 @@
             (r"'(\\\\|\\'|[^'])*'", String.Single),
         ]
     }
+
+class DuelLexer(RegexLexer):
+    """
+    Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
+    See http://duelengine.org/.
+    See http://jsonml.org/jbst/.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Duel'
+    aliases = ['duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST']
+    filenames = ['*.duel','*.jbst']
+    mimetypes = ['text/x-duel','text/x-jbst']
+
+    flags = re.DOTALL
+
+    tokens = {
+        'root': [
+            (r'(<%[@=#!:]?)(.*?)(%>)',
+             bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
+            (r'(<%\$)(.*?)(:)(.*?)(%>)',
+             bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
+            (r'(<%--)(.*?)(--%>)',
+             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+            (r'(<script.*?>)(.*?)(</script>)',
+             bygroups(using(HtmlLexer),
+                      using(JavascriptLexer), using(HtmlLexer))),
+            (r'(.+?)(?=<)', using(HtmlLexer)),
+            (r'.+', using(HtmlLexer)),
+        ],
+    }
+
+
+class ScamlLexer(ExtendedRegexLexer):
+    """
+    For `Scaml markup <http://scalate.fusesource.org/>`_.  Scaml is Haml for Scala.
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Scaml'
+    aliases = ['scaml', 'SCAML']
+    filenames = ['*.scaml']
+    mimetypes = ['text/x-scaml']
+
+    flags = re.IGNORECASE
+    # Scaml does not yet support the " |\n" notation to
+    # wrap long lines.  Once it does, use the custom faux
+    # dot instead.
+    # _dot = r'(?: \|\n(?=.* \|)|.)'
+    _dot = r'.'
+
+    tokens = {
+        'root': [
+            (r'[ \t]*\n', Text),
+            (r'[ \t]*', _indentation),
+        ],
+
+        'css': [
+            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
+            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
+        ],
+
+        'eval-or-plain': [
+            (r'[&!]?==', Punctuation, 'plain'),
+            (r'([&!]?[=~])(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),
+             'root'),
+            (r'', Text, 'plain'),
+        ],
+
+        'content': [
+            include('css'),
+            (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
+            (r'!!!' + _dot + '*\n', Name.Namespace, '#pop'),
+            (r'(/)(\[' + _dot + '*?\])(' + _dot + '*\n)',
+             bygroups(Comment, Comment.Special, Comment),
+             '#pop'),
+            (r'/' + _dot + '*\n', _starts_block(Comment, 'html-comment-block'),
+             '#pop'),
+            (r'-#' + _dot + '*\n', _starts_block(Comment.Preproc,
+                                                 'scaml-comment-block'), '#pop'),
+            (r'(-@\s*)(import)?(' + _dot + '*\n)',
+             bygroups(Punctuation, Keyword, using(ScalaLexer)),
+             '#pop'),
+            (r'(-)(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),
+             '#pop'),
+            (r':' + _dot + '*\n', _starts_block(Name.Decorator, 'filter-block'),
+             '#pop'),
+            include('eval-or-plain'),
+        ],
+
+        'tag': [
+            include('css'),
+            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+            (r'\(', Text, 'html-attributes'),
+            (r'/[ \t]*\n', Punctuation, '#pop:2'),
+            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+            include('eval-or-plain'),
+        ],
+
+        'plain': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+
+        'html-attributes': [
+            (r'\s+', Text),
+            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+            (r'[a-z0-9_:-]+', Name.Attribute),
+            (r'\)', Text, '#pop'),
+        ],
+
+        'html-attribute-value': [
+            (r'[ \t]+', Text),
+            (r'[a-z0-9_]+', Name.Variable, '#pop'),
+            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
+            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
+            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+        ],
+
+        'html-comment-block': [
+            (_dot + '+', Comment),
+            (r'\n', Text, 'root'),
+        ],
+
+        'scaml-comment-block': [
+            (_dot + '+', Comment.Preproc),
+            (r'\n', Text, 'root'),
+        ],
+
+        'filter-block': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+    }
+
+
+class JadeLexer(ExtendedRegexLexer):
+    """
+    For Jade markup.
+    Jade is a variant of Scaml, see:
+    http://scalate.fusesource.org/documentation/scaml-reference.html
+
+    *New in Pygments 1.4.*
+    """
+
+    name = 'Jade'
+    aliases = ['jade', 'JADE']
+    filenames = ['*.jade']
+    mimetypes = ['text/x-jade']
+
+    flags = re.IGNORECASE
+    _dot = r'.'
+
+    tokens = {
+        'root': [
+            (r'[ \t]*\n', Text),
+            (r'[ \t]*', _indentation),
+        ],
+
+        'css': [
+            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
+            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
+        ],
+
+        'eval-or-plain': [
+            (r'[&!]?==', Punctuation, 'plain'),
+            (r'([&!]?[=~])(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),  'root'),
+            (r'', Text, 'plain'),
+        ],
+
+        'content': [
+            include('css'),
+            (r'!!!' + _dot + '*\n', Name.Namespace, '#pop'),
+            (r'(/)(\[' + _dot + '*?\])(' + _dot + '*\n)',
+             bygroups(Comment, Comment.Special, Comment),
+             '#pop'),
+            (r'/' + _dot + '*\n', _starts_block(Comment, 'html-comment-block'),
+             '#pop'),
+            (r'-#' + _dot + '*\n', _starts_block(Comment.Preproc,
+                                                 'scaml-comment-block'), '#pop'),
+            (r'(-@\s*)(import)?(' + _dot + '*\n)',
+             bygroups(Punctuation, Keyword, using(ScalaLexer)),
+             '#pop'),
+            (r'(-)(' + _dot + '*\n)',
+             bygroups(Punctuation, using(ScalaLexer)),
+             '#pop'),
+            (r':' + _dot + '*\n', _starts_block(Name.Decorator, 'filter-block'),
+             '#pop'),
+            (r'[a-z0-9_:-]+', Name.Tag, 'tag'),
+            (r'|', Text, 'eval-or-plain'),
+        ],
+
+        'tag': [
+            include('css'),
+            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+            (r'\(', Text, 'html-attributes'),
+            (r'/[ \t]*\n', Punctuation, '#pop:2'),
+            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+            include('eval-or-plain'),
+        ],
+
+        'plain': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+
+        'html-attributes': [
+            (r'\s+', Text),
+            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+            (r'[a-z0-9_:-]+', Name.Attribute),
+            (r'\)', Text, '#pop'),
+        ],
+
+        'html-attribute-value': [
+            (r'[ \t]+', Text),
+            (r'[a-z0-9_]+', Name.Variable, '#pop'),
+            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
+            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
+            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+        ],
+
+        'html-comment-block': [
+            (_dot + '+', Comment),
+            (r'\n', Text, 'root'),
+        ],
+
+        'scaml-comment-block': [
+            (_dot + '+', Comment.Preproc),
+            (r'\n', Text, 'root'),
+        ],
+
+        'filter-block': [
+            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+            (r'(#\{)(' + _dot + '*?)(\})',
+             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+            (r'\n', Text, 'root'),
+        ],
+    }
+
+
+class XQueryLexer(ExtendedRegexLexer):
+    """
+    An XQuery lexer, parsing a stream and outputting the tokens needed to
+    highlight xquery code.
+
+    *New in Pygments 1.4.*
+    """
+    name = 'XQuery'
+    aliases = ['xquery', 'xqy']
+    filenames = ['*.xqy', '*.xquery']
+    mimetypes = ['text/xquery', 'application/xquery']
+
+    xquery_parse_state = []
+
+    # FIX UNICODE LATER
+    #ncnamestartchar = (
+    #    ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
+    #    ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
+    #    ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
+    #    ur"[\u10000-\uEFFFF]"
+    #)
+    ncnamestartchar = r"[A-Z]|_|[a-z]"
+    # FIX UNICODE LATER
+    #ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
+    #                                ur"[\u203F-\u2040]")
+    ncnamechar = ncnamestartchar + r"|-|\.|[0-9]"
+    ncname = "((%s)+(%s)*)" % (ncnamestartchar, ncnamechar)
+    pitarget_namestartchar = r"[A-KN-WY-Z]|_|:|[a-kn-wy-z]"
+    pitarget_namechar = pitarget_namestartchar + r"|-|\.|[0-9]"
+    pitarget = "(%s)+(%s)*" % (pitarget_namestartchar, pitarget_namechar)
+    prefixedname = "%s:%s" % (ncname, ncname)
+    unprefixedname = ncname
+    qname = "((%s)|(%s))" %(prefixedname, unprefixedname)
+
+    entityref = r'&(lt|gt|amp|quot|apos|nbsp);'
+    charref = r'&#[0-9]+;|&#x[0-9a-fA-F]+;'
+
+    stringdouble = r'("((' + entityref + r')|(' + charref + r')|("")|([^&"]))*")'
+    stringsingle = r"('((" + entityref + r")|(" + charref + r")|('')|([^&']))*')"
+
+    # FIX UNICODE LATER
+    #elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+    #                      ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+    elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
+    #quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
+    #                       ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+    quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
+    #aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+    #                       ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+    aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]'
+
+
+    # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
+    #                 aposattrcontentchar
+    #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+
+    flags = re.DOTALL | re.MULTILINE | re.UNICODE
+
+    def operator_root_callback(lexer, match, ctx):
+        yield match.start(), Operator, match.group(1)
+        # transition to root always - don't pop off stack
+        ctx.stack = ['root']
+        ctx.pos = match.end()
+
+    def popstate_tag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        ctx.stack.append(lexer.xquery_parse_state.pop())
+        ctx.pos = match.end()
+
+    def popstate_xmlcomment_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append(lexer.xquery_parse_state.pop())
+        ctx.pos = match.end()
+
+    def popstate_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        next_state = lexer.xquery_parse_state.pop()
+        if next_state == 'occurrenceindicator':
+            if re.match("[?*+]+", match.group(2)):
+                yield match.start(), Punctuation, match.group(2)
+                ctx.stack.append('operator')
+                ctx.pos = match.end()
+            else:
+                ctx.stack.append('operator')
+                ctx.pos = match.end(1)
+        else:
+            ctx.stack.append(next_state)
+            ctx.pos = match.end(1)
+
+    def popstate_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        # if we have run out of our state stack, pop whatever is on the pygments
+        # state stack
+        if len(lexer.xquery_parse_state) == 0:
+            ctx.stack.pop()
+        elif len(ctx.stack) > 1:
+            ctx.stack.append(lexer.xquery_parse_state.pop())
+        else:
+            # i don't know if i'll need this, but in case, default back to root
+            ctx.stack = ['root']
+        ctx.pos = match.end()
+
+    def pushstate_element_content_starttag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        lexer.xquery_parse_state.append('element_content')
+        ctx.stack.append('start_tag')
+        ctx.pos = match.end()
+
+    def pushstate_cdata_section_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('cdata_section')
+        lexer.xquery_parse_state.append(ctx.state.pop)
+        ctx.pos = match.end()
+
+    def pushstate_starttag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        lexer.xquery_parse_state.append(ctx.state.pop)
+        ctx.stack.append('start_tag')
+        ctx.pos = match.end()
+
+    def pushstate_operator_order_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        ctx.stack = ['root']
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_validate(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        ctx.stack = ['root']
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_validate_withmode(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Keyword, match.group(3)
+        ctx.stack = ['root']
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('processing_instruction')
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('processing_instruction')
+        lexer.xquery_parse_state.append('element_content')
+        ctx.pos = match.end()
+
+    def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('cdata_section')
+        lexer.xquery_parse_state.append('element_content')
+        ctx.pos = match.end()
+
+    def pushstate_operator_cdata_section_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('cdata_section')
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('xml_comment')
+        lexer.xquery_parse_state.append('element_content')
+        ctx.pos = match.end()
+
+    def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
+        yield match.start(), String.Doc, match.group(1)
+        ctx.stack.append('xml_comment')
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    def pushstate_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('kindtest')
+        ctx.stack.append('kindtest')
+        ctx.pos = match.end()
+
+    def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack.append('kindtestforpi')
+        ctx.pos = match.end()
+
+    def pushstate_operator_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack.append('kindtest')
+        ctx.pos = match.end()
+
+    def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('occurrenceindicator')
+        ctx.stack.append('kindtest')
+        ctx.pos = match.end()
+
+    def pushstate_operator_starttag_callback(lexer, match, ctx):
+        yield match.start(), Name.Tag, match.group(1)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack.append('start_tag')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack = ['root']#.append('root')
+        ctx.pos = match.end()
+
+    def pushstate_operator_root_construct_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.stack = ['root']
+        ctx.pos = match.end()
+
+    def pushstate_root_callback(lexer, match, ctx):
+        yield match.start(), Punctuation, match.group(1)
+        cur_state = ctx.stack.pop()
+        lexer.xquery_parse_state.append(cur_state)
+        ctx.stack = ['root']#.append('root')
+        ctx.pos = match.end()
+
+    def pushstate_operator_callback(lexer, match, ctx):
+        yield match.start(), Keyword, match.group(1)
+        yield match.start(), Text, match.group(2)
+        yield match.start(), Punctuation, match.group(3)
+        lexer.xquery_parse_state.append('operator')
+        ctx.pos = match.end()
+
+    tokens = {
+        'comment': [
+            # xquery comments
+            (r'(:\))', Comment, '#pop'),
+            (r'(\(:)', Comment, '#push'),
+            (r'[^:)]', Comment),
+            (r'([^:)]|:|\))', Comment),
+        ],
+        'whitespace': [
+            (r'\s+', Text),
+        ],
+        'operator': [
+            include('whitespace'),
+            (r'(\})', popstate_callback),
+            (r'\(:', Comment, 'comment'),
+
+            (r'(\{)', pushstate_root_callback),
+            (r'then|else|external|at|div|except', Keyword, 'root'),
+            (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
+            (r'and|or', Operator.Word, 'root'),
+            (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
+             Operator.Word, 'root'),
+            (r'return|satisfies|to|union|where|preserve\s+strip',
+             Keyword, 'root'),
+            (r'(::|;|>=|>>|>|\[|<=|<<|<|-|\*|!=|\+|//|/|\||:=|,|=)',
+             operator_root_callback),
+            (r'(castable|cast)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword), 'singletype'),
+            (r'(instance)(\s+)(of)|(treat)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword), 'itemtype'),
+            (r'(case)|(as)', Keyword, 'itemtype'),
+            (r'(\))(\s*)(as)',
+             bygroups(Punctuation, Text, Keyword), 'itemtype'),
+            (r'\$', Name.Variable, 'varname'),
+            (r'(for|let)(\s+)(\$)',
+             bygroups(Keyword, Text, Name.Variable), 'varname'),
+            #(r'\)|\?|\]', Punctuation, '#push'),
+            (r'\)|\?|\]', Punctuation),
+            (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
+            (r'ascending|descending|default', Keyword, '#push'),
+            (r'external', Keyword),
+            (r'collation', Keyword, 'uritooperator'),
+            # finally catch all string literals and stay in operator state
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+
+            (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
+        ],
+        'uritooperator': [
+            (stringdouble, String.Double, '#pop'),
+            (stringsingle, String.Single, '#pop'),
+        ],
+        'namespacedecl': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (r'(at)(\s+)'+stringdouble, bygroups(Keyword, Text, String.Double)),
+            (r"(at)(\s+)"+stringsingle, bygroups(Keyword, Text, String.Single)),
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+            (r',', Punctuation),
+            (r'=', Operator),
+            (r';', Punctuation, 'root'),
+            (ncname, Name.Namespace),
+        ],
+        'namespacekeyword': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (stringdouble, String.Double, 'namespacedecl'),
+            (stringsingle, String.Single, 'namespacedecl'),
+            (r'inherit|no-inherit', Keyword, 'root'),
+            (r'namespace', Keyword, 'namespacedecl'),
+            (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
+            (r'preserve|no-preserve', Keyword),
+            (r',', Punctuation),
+        ],
+        'varname': [
+            (r'\(:', Comment, 'comment'),
+            (qname, Name.Variable, 'operator'),
+        ],
+        'singletype': [
+            (r'\(:', Comment, 'comment'),
+            (ncname + r'(:\*)', Name.Variable, 'operator'),
+            (qname, Name.Variable, 'operator'),
+        ],
+        'itemtype': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (r'\$', Punctuation, 'varname'),
+            (r'(void)(\s*)(\()(\s*)(\))',
+             bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
+            (r'(element|attribute|schema-element|schema-attribute|comment|text|'
+             r'node|binary|document-node)(\s*)(\()',
+             pushstate_occurrenceindicator_kindtest_callback),
+            # Marklogic specific type?
+            (r'(processing-instruction)(\s*)(\()',
+             bygroups(Keyword, Text, Punctuation),
+             ('occurrenceindicator', 'kindtestforpi')),
+            (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
+             bygroups(Keyword, Text, Punctuation, Text, Punctuation),
+             'occurrenceindicator'),
+            (r'\(\#', Punctuation, 'pragma'),
+            (r';', Punctuation, '#pop'),
+            (r'then|else', Keyword, '#pop'),
+            (r'(at)(\s+)' + stringdouble,
+             bygroups(Keyword, Text, String.Double), 'namespacedecl'),
+            (r'(at)(\s+)' + stringsingle,
+             bygroups(Keyword, Text, String.Single), 'namespacedecl'),
+            (r'except|intersect|in|is|return|satisfies|to|union|where',
+             Keyword, 'root'),
+            (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
+            (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'),
+            (r'external|at', Keyword, 'root'),
+            (r'(stable)(\s+)(order)(\s+)(by)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
+            (r'(castable|cast)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword), 'singletype'),
+            (r'(instance)(\s+)(of)|(treat)(\s+)(as)',
+             bygroups(Keyword, Text, Keyword)),
+            (r'case|as', Keyword, 'itemtype'),
+            (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+            (ncname + r'(:\*)', Keyword.Type, 'operator'),
+            (qname, Keyword.Type, 'occurrenceindicator'),
+        ],
+        'kindtest': [
+            (r'\(:', Comment, 'comment'),
+            (r'({)', Punctuation, 'root'),
+            (r'(\))([*+?]?)', popstate_kindtest_callback),
+            (r'\*', Name, 'closekindtest'),
+            (qname, Name, 'closekindtest'),
+            (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
+        ],
+        'kindtestforpi': [
+            (r'\(:', Comment, 'comment'),
+            (r'\)', Punctuation, '#pop'),
+            (ncname, Name.Variable),
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+        ],
+        'closekindtest': [
+            (r'\(:', Comment, 'comment'),
+            (r'(\))', popstate_callback),
+            (r',', Punctuation),
+            (r'(\{)', pushstate_operator_root_callback),
+            (r'\?', Punctuation),
+        ],
+        'xml_comment': [
+            (r'(-->)', popstate_xmlcomment_callback),
+            (r'[^-]{1,2}', Literal),
+            (r'\u009|\u00A|\u00D|[\u0020-\u00D7FF]|[\u00E000-\u00FFFD]|'
+             r'[\u0010000-\u0010FFFF]', Literal),
+        ],
+        'processing_instruction': [
+            (r'\s+', Text, 'processing_instruction_content'),
+            (r'\?>', String.Doc, '#pop'),
+            (pitarget, Name),
+        ],
+        'processing_instruction_content': [
+            (r'\?>', String.Doc, '#pop'),
+            (r'\u009|\u00A|\u00D|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
+             r'[\u10000-\u10FFFF]', Literal),
+        ],
+        'cdata_section': [
+            (r']]>', String.Doc, '#pop'),
+            (r'\u009|\u00A|\u00D|[\u0020-\uD7FF]|[\uE000-\uFFFD]|'
+             r'[\u10000-\u10FFFF]', Literal),
+        ],
+        'start_tag': [
+            include('whitespace'),
+            (r'(/>)', popstate_tag_callback),
+            (r'>', Name.Tag, 'element_content'),
+            (r'"', Punctuation, 'quot_attribute_content'),
+            (r"'", Punctuation, 'apos_attribute_content'),
+            (r'=', Operator),
+            (qname, Name.Tag),
+        ],
+        'quot_attribute_content': [
+            (r'"', Punctuation, 'start_tag'),
+            (r'(\{)', pushstate_root_callback),
+            (r'""', Name.Attribute),
+            (quotattrcontentchar, Name.Attribute),
+            (entityref, Name.Attribute),
+            (charref, Name.Attribute),
+            (r'\{\{|\}\}', Name.Attribute),
+        ],
+        'apos_attribute_content': [
+            (r"'", Punctuation, 'start_tag'),
+            (r'\{', Punctuation, 'root'),
+            (r"''", Name.Attribute),
+            (aposattrcontentchar, Name.Attribute),
+            (entityref, Name.Attribute),
+            (charref, Name.Attribute),
+            (r'\{\{|\}\}', Name.Attribute),
+        ],
+        'element_content': [
+            (r'</', Name.Tag, 'end_tag'),
+            (r'(\{)', pushstate_root_callback),
+            (r'(<!--)', pushstate_element_content_xmlcomment_callback),
+            (r'(<\?)', pushstate_element_content_processing_instruction_callback),
+            (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
+            (r'(<)', pushstate_element_content_starttag_callback),
+            (elementcontentchar, Literal),
+            (entityref, Literal),
+            (charref, Literal),
+            (r'\{\{|\}\}', Literal),
+        ],
+        'end_tag': [
+            include('whitespace'),
+            (r'(>)', popstate_tag_callback),
+            (qname, Name.Tag),
+        ],
+        'xmlspace_decl': [
+            (r'\(:', Comment, 'comment'),
+            (r'preserve|strip', Keyword, '#pop'),
+        ],
+        'declareordering': [
+            (r'\(:', Comment, 'comment'),
+            include('whitespace'),
+            (r'ordered|unordered', Keyword, '#pop'),
+        ],
+        'xqueryversion': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+            (r'encoding', Keyword),
+            (r';', Punctuation, '#pop'),
+        ],
+        'pragma': [
+            (qname, Name.Variable, 'pragmacontents'),
+        ],
+        'pragmacontents': [
+            (r'#\)', Punctuation, 'operator'),
+            (r'\u009|\u00A|\u00D|[\u0020-\u00D7FF]|[\u00E000-\u00FFFD]|'
+             r'[\u0010000-\u0010FFFF]', Literal),
+            (r'(\s+)', Text),
+        ],
+        'occurrenceindicator': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (r'\*|\?|\+', Operator, 'operator'),
+            (r':=', Operator, 'root'),
+            (r'', Text, 'operator'),
+        ],
+        'option': [
+            include('whitespace'),
+            (qname, Name.Variable, '#pop'),
+        ],
+        'qname_braren': [
+            include('whitespace'),
+            (r'(\{)', pushstate_operator_root_callback),
+            (r'(\()', Punctuation, 'root'),
+        ],
+        'element_qname': [
+            (qname, Name.Variable, 'root'),
+        ],
+        'attribute_qname': [
+            (qname, Name.Variable, 'root'),
+        ],
+        'root': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+
+            # handle operator state
+            # order on numbers matters - handle most complex first
+            (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Double, 'operator'),
+            (r'(\.\d+)[eE][\+\-]?\d+', Number.Double, 'operator'),
+            (r'(\.\d+|\d+\.\d*)', Number, 'operator'),
+            (r'(\d+)', Number.Integer, 'operator'),
+            (r'(\.\.|\.|\)|\*)', Punctuation, 'operator'),
+            (r'(declare)(\s+)(construction)',
+             bygroups(Keyword, Text, Keyword), 'operator'),
+            (r'(declare)(\s+)(default)(\s+)(order)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
+            (ncname + ':\*', Name, 'operator'),
+            (stringdouble, String.Double, 'operator'),
+            (stringsingle, String.Single, 'operator'),
+
+            (r'(\})', popstate_callback),
+
+            #NAMESPACE DECL
+            (r'(declare)(\s+)(default)(\s+)(collation)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword)),
+            (r'(module|declare)(\s+)(namespace)',
+             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+            (r'(declare)(\s+)(base-uri)',
+             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+
+            #NAMESPACE KEYWORD
+            (r'(declare)(\s+)(default)(\s+)(element|function)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
+            (r'(import)(\s+)(schema|module)',
+             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
+            (r'(declare)(\s+)(copy-namespaces)',
+             bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
+
+            #VARNAMEs
+            (r'(for|let|some|every)(\s+)(\$)',
+             bygroups(Keyword, Text, Name.Variable), 'varname'),
+            (r'\$', Name.Variable, 'varname'),
+            (r'(declare)(\s+)(variable)(\s+)(\$)',
+             bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+
+            #ITEMTYPE
+            (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+
+            (r'(element|attribute|schema-element|schema-attribute|comment|'
+             r'text|node|document-node)(\s+)(\()',
+             pushstate_operator_kindtest_callback),
+
+            (r'(processing-instruction)(\s+)(\()',
+             pushstate_operator_kindtestforpi_callback),
+
+            (r'(<!--)', pushstate_operator_xmlcomment_callback),
+
+            (r'(<\?)', pushstate_operator_processing_instruction_callback),
+
+            (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
+
+            # (r'</', Name.Tag, 'end_tag'),
+            (r'(<)', pushstate_operator_starttag_callback),
+
+            (r'(declare)(\s+)(boundary-space)',
+             bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
+
+            (r'(validate)(\s+)(lax|strict)',
+             pushstate_operator_root_validate_withmode),
+            (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
+            (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+            (r'(element|attribute)(\s*)(\{)',
+             pushstate_operator_root_construct_callback),
+
+            (r'(document|text|processing-instruction|comment)(\s*)(\{)',
+             pushstate_operator_root_construct_callback),
+            #ATTRIBUTE
+            (r'(attribute)(\s+)(?=' + qname + r')',
+             bygroups(Keyword, Text), 'attribute_qname'),
+            #ELEMENT
+            (r'(element)(\s+)(?=' +qname+ r')',
+             bygroups(Keyword, Text), 'element_qname'),
+            #PROCESSING_INSTRUCTION
+            (r'(processing-instruction)(\s+)' + ncname + r'(\s*)(\{)',
+             bygroups(Keyword, Text, Name.Variable, Text, Punctuation), 'operator'),
+
+            (r'(declare|define)(\s+)(function)',
+             bygroups(Keyword, Text, Keyword)),
+
+            (r'(\{)', pushstate_operator_root_callback),
+
+            (r'(unordered|ordered)(\s*)(\{)',
+             pushstate_operator_order_callback),
+
+            (r'(declare)(\s+)(ordering)',
+             bygroups(Keyword, Text, Keyword), 'declareordering'),
+
+            (r'(xquery)(\s+)(version)',
+             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
+
+            (r'(\(#)', Punctuation, 'pragma'),
+
+            # sometimes return can occur in root state
+            (r'return', Keyword),
+
+            (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
+             'option'),
+
+            #URI LITERALS - single and double quoted
+            (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
+            (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
+
+            (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
+             bygroups(Keyword, Punctuation)),
+            (r'(descendant|following-sibling|following|parent|preceding-sibling'
+             r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
+
+            (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+
+            (r'then|else', Keyword),
+
+            # ML specific
+            (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
+            (r'(catch)(\s*)(\()(\$)',
+             bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
+
+            (r'@' + qname, Name.Attribute),
+            (r'@\*', Name.Attribute),
+            (r'@' + ncname, Name.Attribute),
+
+            (r'//|/|\+|-|;|,|\(|\)', Punctuation),
+
+            # STANDALONE QNAMES
+            (qname + r'(?=\s*[{])', Name.Variable, 'qname_braren'),
+            (qname + r'(?=\s*[(])', Name.Function, 'qname_braren'),
+            (qname, Name.Variable, 'operator'),
+        ]
+    }
+
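A minimal sketch of exercising the JadeLexer and XQueryLexer added above (illustration only, not part of the changeset; it assumes the bundled package is importable as pygments and that the accompanying lexer mapping registers the 'jade' and 'xquery' aliases shown above):

    # Illustration only; not part of this changeset.
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    jade_src = "#content\n  p some plain text\n"
    xqy_src = 'xquery version "1.0"; <greeting>{ "hello" }</greeting>'

    print(highlight(jade_src, get_lexer_by_name('jade'), HtmlFormatter()))
    print(highlight(xqy_src, get_lexer_by_name('xquery'), HtmlFormatter()))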
--- a/MoinMoin/support/pygments/styles/__init__.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/styles/__init__.py	Thu Dec 01 02:26:16 2011 +0100
@@ -30,6 +30,7 @@
     'native':   'native::NativeStyle',
     'fruity':   'fruity::FruityStyle',
     'bw':       'bw::BlackWhiteStyle',
+    'vim':      'vim::VimStyle',
     'vs':       'vs::VisualStudioStyle',
     'tango':    'tango::TangoStyle',
 }
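The newly registered 'vim' style can be requested like any other built-in style. A small sketch (illustration only; it relies only on the standard pygments.styles and formatter API):

    # Illustration only; not part of this changeset.
    from pygments.styles import get_style_by_name
    from pygments.formatters import HtmlFormatter

    vim_style = get_style_by_name('vim')           # resolves 'vim' -> vim::VimStyle
    formatter = HtmlFormatter(style=vim_style)     # HtmlFormatter(style='vim') also works
    print(formatter.get_style_defs('.highlight'))  # CSS rules for the new style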
--- a/MoinMoin/support/pygments/token.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/token.py	Thu Dec 01 02:26:16 2011 +0100
@@ -40,9 +40,6 @@
         new.parent = self
         return new
 
-    def __hash__(self):
-        return hash(tuple(self))
-
     def __repr__(self):
         return 'Token' + (self and '.' or '') + '.'.join(self)
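Dropping __hash__ leaves token types hashing through their tuple base class, which should yield the same values as the removed hash(tuple(self)) while saving a method call. A quick check (illustration only):

    # Illustration only; not part of this changeset.
    from pygments.token import Token, Keyword

    # Token types remain usable as dict keys, e.g. in style definitions.
    style_map = {Token: '', Keyword: 'bold #008000'}
    assert hash(Keyword) == hash(tuple(Keyword))  # same value the removed method returned
    print(style_map[Keyword])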
 
--- a/MoinMoin/support/pygments/util.py	Thu Dec 01 02:05:15 2011 +0100
+++ b/MoinMoin/support/pygments/util.py	Thu Dec 01 02:26:16 2011 +0100
@@ -110,10 +110,16 @@
     returns float values.
     """
     def text_analyse(text):
-        rv = f(text)
+        try:
+            rv = f(text)
+        except Exception:
+            return 0.0
         if not rv:
             return 0.0
-        return min(1.0, max(0.0, float(rv)))
+        try:
+            return min(1.0, max(0.0, float(rv)))
+        except ValueError:
+            return 0.0
     text_analyse.__doc__ = f.__doc__
     return staticmethod(text_analyse)
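With this change, make_analysator guards against analyse_text implementations that raise or return values float() cannot handle, mapping both cases to 0.0 instead of letting the error escape into lexer guessing. A sketch of the effect (illustration only; the FussyAnalyser class is made up for the example):

    # Illustration only; not part of this changeset.
    from pygments.util import make_analysator

    class FussyAnalyser(object):
        def analyse_text(text):
            if not text:
                raise ValueError("no input")  # previously this would propagate
            return 5                          # out-of-range values are clamped
        analyse_text = make_analysator(analyse_text)

    print(FussyAnalyser.analyse_text(""))          # 0.0 instead of an exception
    print(FussyAnalyser.analyse_text("anything"))  # 1.0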
 
--- a/docs/REQUIREMENTS	Thu Dec 01 02:05:15 2011 +0100
+++ b/docs/REQUIREMENTS	Thu Dec 01 02:26:16 2011 +0100
@@ -96,7 +96,7 @@
 
 pygments (highlighting for all sorts of source code and other text files)
 =========================================================================
-shipped: 1.3.1
+shipped: 1.4
 minimum: 1.1.1 will work, maybe even older versions