changeset 746:0d3e9b79dde3

moved SecurityPolicy stuff to security/*
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Tue, 06 Jun 2006 21:39:16 +0200
parents 7697385bbb7f
children e178ada80ead
files MoinMoin/i18n/POTFILES.in MoinMoin/security.py MoinMoin/security/__init__.py MoinMoin/security/antispam.py MoinMoin/security/autoadmin.py MoinMoin/util/antispam.py MoinMoin/util/autoadmin.py docs/CHANGES setup.py wiki/config/more_samples/ldap_smb_farmconfig.py wiki/config/wikiconfig.py wiki/config/wikifarm/farmconfig.py
diffstat 12 files changed, 430 insertions(+), 425 deletions(-)
--- a/MoinMoin/i18n/POTFILES.in	Mon Jun 05 17:52:35 2006 +0200
+++ b/MoinMoin/i18n/POTFILES.in	Tue Jun 06 21:39:16 2006 +0200
@@ -6,7 +6,6 @@
 config.py
 error.py
 search.py
-security.py
 user.py
 userform.py
 version.py
@@ -108,6 +107,10 @@
 parser/text_moin_wiki.py
 parser/text_xslt.py
 
+security/__init__.py
+security/antispam.py
+security/autoadmin.py
+
 server/__init__.py
 server/daemon.py
 server/twistedmoin.py
@@ -133,8 +136,6 @@
 
 util/ParserBase.py
 util/__init__.py
-util/antispam.py
-util/autoadmin.py
 util/chartypes.py
 util/dataset.py
 util/diff.py
--- a/MoinMoin/security.py	Mon Jun 05 17:52:35 2006 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,59 +0,0 @@
-# -*- coding: iso-8859-1 -*-
-"""
-    MoinMoin - Wiki Security Interface
-
-    This implements the basic interface for user permissions and
-    system policy. If you want to define your own policy, inherit
-    from the base class 'Permissions', so that when new permissions
-    are defined, you get the defaults.
-
-    Then assign your new class to "SecurityPolicy" in wikiconfig;
-    and I mean the class, not an instance of it!
-
-    @copyright: 2000-2004 by Jürgen Hermann <jh@web.de>
-    @license: GNU GPL, see COPYING for details.
-"""
-
-#############################################################################
-### Basic Permissions Interface -- most features enabled by default
-#############################################################################
-
-
-class Permissions:
-    """ Basic interface for user permissions and system policy.
-
-        Note that you still need to allow some of the related actions, this
-        just controls their behaviour, not their activation.
-    """
-
-    def __init__(self, user):
-        """ Calculate the permissons `user` has.
-        """
-        from MoinMoin.Page import Page
-        self.Page = Page
-        self.name = user.name
-        self.request = user._request
-
-    def save(self, editor, newtext, rev, **kw):
-        """ Check whether user may save a page.
-
-            `editor` is the PageEditor instance, the other arguments are
-            those of the `PageEditor.saveText` method.
-        """
-        return self.write(editor.page_name)
-
-    def __getattr__(self, attr):
-        """ if attr is one of the rights in acl_rights_valid, then return a
-            checking function for it. Else raise an error.
-        """
-        request = self.request
-        Page = self.Page
-        if attr in request.cfg.acl_rights_valid:
-            return lambda pagename, Page=Page, request=request, attr=attr: Page(request, pagename).getACL(request).may(request, self.name, attr)
-        else:
-            raise AttributeError, attr
-        
-
-# make an alias for the default policy
-Default = Permissions
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/security/__init__.py	Tue Jun 06 21:39:16 2006 +0200
@@ -0,0 +1,59 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - Wiki Security Interface
+
+    This implements the basic interface for user permissions and
+    system policy. If you want to define your own policy, inherit
+    from the base class 'Permissions', so that when new permissions
+    are defined, you get the defaults.
+
+    Then assign your new class to "SecurityPolicy" in wikiconfig;
+    and I mean the class, not an instance of it!
+
+    @copyright: 2000-2004 by Jürgen Hermann <jh@web.de>
+    @license: GNU GPL, see COPYING for details.
+"""
+
+#############################################################################
+### Basic Permissions Interface -- most features enabled by default
+#############################################################################
+
+
+class Permissions:
+    """ Basic interface for user permissions and system policy.
+
+        Note that you still need to allow some of the related actions, this
+        just controls their behaviour, not their activation.
+    """
+
+    def __init__(self, user):
+        """ Calculate the permissons `user` has.
+        """
+        from MoinMoin.Page import Page
+        self.Page = Page
+        self.name = user.name
+        self.request = user._request
+
+    def save(self, editor, newtext, rev, **kw):
+        """ Check whether user may save a page.
+
+            `editor` is the PageEditor instance, the other arguments are
+            those of the `PageEditor.saveText` method.
+        """
+        return self.write(editor.page_name)
+
+    def __getattr__(self, attr):
+        """ if attr is one of the rights in acl_rights_valid, then return a
+            checking function for it. Else raise an error.
+        """
+        request = self.request
+        Page = self.Page
+        if attr in request.cfg.acl_rights_valid:
+            return lambda pagename, Page=Page, request=request, attr=attr: Page(request, pagename).getACL(request).may(request, self.name, attr)
+        else:
+            raise AttributeError, attr
+        
+
+# make an alias for the default policy
+Default = Permissions
+
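Note: the module docstring above tells policy authors to inherit from Permissions and to assign the class itself (not an instance) to SecurityPolicy in wikiconfig. A minimal sketch of such a custom policy follows; ReadOnlyPolicy is a hypothetical name and the DefaultConfig base assumes the 1.5-era wikiconfig layout, neither is part of this changeset.

    # wikiconfig.py -- illustrative sketch only
    from MoinMoin.multiconfig import DefaultConfig
    from MoinMoin.security import Permissions

    class ReadOnlyPolicy(Permissions):
        """ Example policy: deny write and delete for everybody; all other
            rights fall back to the ACL checks from Permissions.__getattr__.
        """
        def write(self, pagename):
            return False
        def delete(self, pagename):
            return False
        # Permissions.save() calls self.write(), so saving is denied as well.

    class Config(DefaultConfig):
        SecurityPolicy = ReadOnlyPolicy   # assign the class, not an instance
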
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/security/antispam.py	Tue Jun 06 21:39:16 2006 +0200
@@ -0,0 +1,245 @@
+#!/usr/bin/env python
+# -*- coding: iso-8859-1 -*-
+"""
+    This implements a global (and a local) blacklist against wiki spammers.
+
+    If started from commandline, it prints a merged list (moinmaster + MT) on
+    stdout, and what it got additionally from MT on stderr.
+    
+    @copyright: 2005 by Thomas Waldmann
+    @license: GNU GPL, see COPYING for details
+"""
+
+# give some log entries to stderr
+debug = 1
+
+import re, sys, time, datetime
+import sets
+
+if __name__ == '__main__':
+    sys.path.insert(0, "../..")
+
+from MoinMoin.security import Permissions
+from MoinMoin import caching, wikiutil
+
+# Errors ---------------------------------------------------------------
+
+class Error(Exception):
+    """Base class for antispam errors."""
+
+    def __str__(self):
+        return repr(self)
+
+class WikirpcError(Error):
+    """ Raised when we get xmlrpclib.Fault """
+
+    def __init__(self, msg, fault):
+        """ Init with msg and xmlrpclib.Fault dict """
+        self.msg = msg
+        self.fault = fault
+
+    def __str__(self):
+        """ Format the using description and data from the fault """
+        return self.msg + ": [%(faultCode)s]  %(faultString)s" % self.fault
+
+
+# Functions ------------------------------------------------------------
+
+def dprint(s):
+    if debug:
+        if isinstance(s, unicode):
+            s = s.encode('utf-8')
+        sys.stderr.write('%s\n' % s)
+
+
+def makelist(text):
+    """ Split text into lines, strip them, skip # comments """
+    lines = text.splitlines()
+    list = []
+    for line in lines:
+        line = line.split(' # ', 1)[0] # rest of line comment
+        line = line.strip()
+        if line and not line.startswith('#'):
+            list.append(line)
+    return list
+
+
+def getblacklist(request, pagename, do_update):
+    """ Get blacklist, possibly downloading new copy
+
+    @param request: current request (request instance)
+    @param pagename: bad content page name (unicode)
+    @rtype: list
+    @return: list of blacklisted regular expressions
+    """
+    from MoinMoin.PageEditor import PageEditor
+    p = PageEditor(request, pagename, uid_override="Antispam subsystem")
+    invalidate_cache = False
+    if do_update:
+        tooold = time.time() - 3600
+        mymtime = wikiutil.version2timestamp(p.mtime_usecs())
+        failure = caching.CacheEntry(request, "antispam", "failure", scope='wiki')
+        fail_time = failure.mtime() # only update if no failure in last hour
+        if (mymtime < tooold) and (fail_time < tooold):
+            dprint("%d *BadContent too old, have to check for an update..." % tooold)
+            import xmlrpclib
+            import socket
+
+            timeout = 15 # time out for reaching the master server via xmlrpc
+            old_timeout = socket.getdefaulttimeout()
+            socket.setdefaulttimeout(timeout)
+            
+            # For production code
+            uri = "http://moinmaster.wikiwikiweb.de:8000/?action=xmlrpc2"
+            # For testing (use your test wiki as BadContent source)
+            ##uri = "http://localhost/main/?action=xmlrpc2"
+            master = xmlrpclib.ServerProxy(uri)
+
+            try:
+                # Get BadContent info
+                master.putClientInfo('ANTISPAM-CHECK',
+                                     request.http_host+request.script_name)
+                response = master.getPageInfo(pagename)
+
+                # It seems that response is always a dict
+                if isinstance(response, dict) and 'faultCode' in response:
+                    raise WikirpcError("failed to get BadContent information",
+                                       response)
+                
+                # Compare date against local BadContent copy
+                masterdate = response['lastModified']
+
+                if isinstance(masterdate, datetime.datetime): 
+                    # for python 2.5a
+                    mydate = datetime.datetime(*tuple(time.gmtime(mymtime))[0:6])
+                else:
+                    # for python <= 2.4.x
+                    mydate = xmlrpclib.DateTime(tuple(time.gmtime(mymtime)))
+                                                    
+                dprint("master: %s mine: %s" % (masterdate, mydate))
+                if mydate < masterdate:
+                    # Get new copy and save
+                    dprint("Fetching page from master...")
+                    master.putClientInfo('ANTISPAM-FETCH',
+                                         request.http_host + request.script_name)
+                    response = master.getPage(pagename)
+                    if isinstance(response, dict) and 'faultCode' in response:
+                        raise WikirpcError("failed to get BadContent data",
+                                           response)
+                    p._write_file(response)
+
+                invalidate_cache = True
+
+            except (socket.error, xmlrpclib.ProtocolError), err:
+                # Log the error
+                # TODO: check if this does not fill the logs!
+                dprint('Timeout / socket / protocol error when accessing'
+                       ' moinmaster: %s' % str(err))
+                # update cache to wait before the next try
+                failure.update("")
+
+            except Error, err:
+                # In case of Error, we log the error and use the local
+                # BadContent copy.
+                dprint(str(err))
+
+            # set back socket timeout
+            socket.setdefaulttimeout(old_timeout)
+                
+    blacklist = p.get_raw_body()
+    return invalidate_cache, makelist(blacklist)
+
+
+class SecurityPolicy(Permissions):
+    """ Extend the default security policy with antispam feature """
+    
+    def save(self, editor, newtext, rev, **kw):
+        BLACKLISTPAGES = ["BadContent", "LocalBadContent"]
+        if not editor.page_name in BLACKLISTPAGES:
+            request = editor.request
+
+            # Start timing of antispam operation
+            request.clock.start('antispam')
+            
+            blacklist = []
+            invalidate_cache = not getattr(request.cfg, "_mmblcache", None)
+            for pn in BLACKLISTPAGES:
+                do_update = (pn != "LocalBadContent")
+                invalidate_cache_necessary, blacklist_entries = getblacklist(request, pn, do_update)
+                blacklist += blacklist_entries
+                invalidate_cache |= invalidate_cache_necessary
+
+            if blacklist:
+                if invalidate_cache:
+                    mmblcache = []
+                    for blacklist_re in blacklist:
+                        try:
+                            mmblcache.append(re.compile(blacklist_re, re.I))
+                        except re.error, err:
+                            dprint("Error in regex '%s': %s. Please check the pages %s." % (blacklist_re, str(err), ', '.join(BLACKLISTPAGES)))
+                            continue
+                    request.cfg._mmblcache = mmblcache
+
+                from MoinMoin.Page import Page
+
+                oldtext = ""
+                if rev > 0: # rev is the revision of the old page
+                    page = Page(request, editor.page_name, rev=rev)
+                    oldtext = page.get_raw_body()
+
+                newset = sets.ImmutableSet(newtext.splitlines(1))
+                oldset = sets.ImmutableSet(oldtext.splitlines(1))
+                difference = newset.difference(oldset)
+                addedtext = ''.join(difference) 
+                
+                for blacklist_re in request.cfg._mmblcache:
+                    match = blacklist_re.search(addedtext)
+                    if match:
+                        # Log error and raise SaveError, PageEditor
+                        # should handle this.
+                        _ = editor.request.getText
+                        msg = _('Sorry, can not save page because "%(content)s"'
+                                ' is not allowed in this wiki.') % {
+                            'content': match.group()
+                            }
+                        dprint(msg)
+                        raise editor.SaveError(msg)
+            request.clock.stop('antispam')
+            
+        # No problem to save if my base class agrees
+        return Permissions.save(self, editor, newtext, rev, **kw)
+
+
+def main():
+    """ Fetch spammer patterns from MT blacklist and moinmaster and merge them.
+        A complete new list for moinmaster gets printed to stdout,
+        only the new entries are printed to stderr.
+    """
+    import urllib
+    mtbl = urllib.urlopen("http://www.jayallen.org/comment_spam/blacklist.txt").read()
+    mmbl = urllib.urlopen("http://moinmaster.wikiwikiweb.de/BadContent?action=raw").read()
+    mtbl = makelist(mtbl)
+    mmbl = makelist(mmbl)
+    print "#format plain"
+    print "#acl All:read"
+    newbl = []
+    for i in mtbl:
+        for j in mmbl:
+            match = re.search(j, i, re.I)
+            if match:
+                break
+        if not match and i not in mmbl:
+            print >>sys.stderr, "%s" % i
+            newbl.append(i)
+    bl = mmbl + newbl
+    bl.sort()
+    lasti = None
+    for i in bl:
+        if i != lasti:
+            print i
+            lasti = i
+
+if __name__ == '__main__':
+    main()
+
+
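Note: SecurityPolicy.save() above compiles the BadContent/LocalBadContent entries into regexes and matches them only against the lines that are new in the current revision (the newset/oldset difference). A standalone sketch of that core check, using hypothetical patterns and page texts in place of the wiki pages and request cache:

    import re

    # hypothetical blacklist entries, as makelist() would return them
    blacklist = [r"buy-cheap-pills\.example", r"casino-spam\.example"]
    compiled = [re.compile(entry, re.I) for entry in blacklist]

    oldtext = "Welcome to my homepage.\n"
    newtext = "Welcome to my homepage.\nVisit http://buy-cheap-pills.example/ now!\n"

    # like newset.difference(oldset) in save(): only added lines get scanned
    added = ''.join(frozenset(newtext.splitlines(1)) - frozenset(oldtext.splitlines(1)))

    for blacklist_re in compiled:
        match = blacklist_re.search(added)
        if match:
            # the real policy raises editor.SaveError(msg) at this point
            print "spam detected: %s" % match.group()
            break
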
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/security/autoadmin.py	Tue Jun 06 21:39:16 2006 +0200
@@ -0,0 +1,113 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - SecurityPolicy implementing auto admin rights for some users and some groups.
+    
+    The AutoAdminGroup page contains users who automatically get admin rights
+    on their homepage and subpages of it. E.g. if ThomasWaldmann is in
+    AutoAdminGroup (or in a group contained in AutoAdminGroup), he gets
+    admin rights on pages ThomasWaldmann and ThomasWaldmann/*.
+
+    The AutoAdminGroup page also contains groups whose members automatically get
+    admin rights on the group's basename.
+    E.g. if SomeProject/AdminGroup is in AutoAdminGroup and ThomasWaldmann is
+    in SomeProject/AdminGroup, then ThomasWaldmann gets admin rights on pages
+    SomeProject and SomeProject/*.
+    
+    Further, it can autocreate the UserName/XxxxGroup (see grouppages var) when
+    a user saves his homepage. Alternatively, this can also be done manually by
+    the user using *Template pages.
+
+    Usage (for wiki admin):
+     * Create an AutoAdminGroup page. If you don't know better, create an empty
+       page for starting.
+     * Enabling a home page for AutoAdmin: just add the user name to the
+       AutoAdminGroup page. After that, this user can create or change ACLs on
+       his homepage or subpages of it.
+     * Enabling another (project) page for AutoAdmin: add <PageName>/AdminGroup
+       to AutoAdminGroup. Also create that <PageName>/AdminGroup page and add
+       at least one user or one group to that page, enabling him or them to
+       create or change ACLs on <PageName> or subpages of it.
+     Those pages edited by wiki admin should be ACL protected with write access
+     limited to allowed people. They are used as source for some ACL
+     information and thus should be treated like the ACLs they get fed into.
+
+    Usage (for homepage owners):
+     * see if there is a HomepageTemplate with a prepared ACL line and some
+       other magic already on it. It is a good idea to have your homepage
+       read- and writeable for everybody as a means of open communication.
+       
+     * For creating personal (or private) subpages of your homepage, use the
+       ReadWritePageTemplate, ReadPageTemplate or PrivatePageTemplate.
+       They usually have some prepared ACL line on them, e.g.:
+       #acl @ME@/ReadWriteGroup:read,write @ME@/ReadGroup:read
+       That @ME@ from the template will be expanded to your name when saving,
+       thus using those 2 subpages (YourName/ReadWriteGroup and
+       YourName/ReadGroup) for allowing read/write or read-only access.
+       Now you only have to maintain those 2 subpages (maybe they even
+       have been auto-created for you).
+     
+    Usage (for project people):
+     * see if there is some <ProjectName>Template with a prepared ACL line for
+       your project pages and use it for creating new subpages.
+       Use <ProjectName>/ReadWriteGroup and /ReadGroup etc. as you would do for
+       a homepage (see above).
+
+    @copyright: (c) Bastian Blank, Florian Festi, Thomas Waldmann
+    @license: GNU GPL, see COPYING for details.
+"""
+
+grouppage_autocreate = False # autocreate the group pages - alternatively use templates
+grouppages = ['AdminGroup', 'ReadGroup', 'ReadWriteGroup', ] # names of the subpages defining ACL groups
+
+from MoinMoin.security import Permissions
+from MoinMoin.Page import Page
+from MoinMoin.PageEditor import PageEditor
+
+class SecurityPolicy(Permissions):
+    """ Extend the default security policy with autoadmin feature """
+    
+    def admin(self, pagename):
+        try:
+            request = self.request
+            has_member = request.dicts.has_member
+            username = request.user.name
+            pagename = request.page.page_name
+            mainpage = pagename.split('/')[0]
+            if username == mainpage and has_member('AutoAdminGroup', username):
+                return True
+            groupname = "%s/AdminGroup" % mainpage
+            if has_member(groupname, username) and has_member('AutoAdminGroup', groupname):
+                return True
+        except AttributeError:
+            pass # when we get called from xmlrpc, there is no request.page
+        return Permissions.__getattr__(self, 'admin')(pagename)
+
+    def save(self, editor, newtext, rev, **kw):
+        request = self.request
+        has_member = request.dicts.has_member
+        username = request.user.name
+        pagename = editor.page_name
+
+        if grouppage_autocreate and username == pagename:
+            # create group pages when a user saves his own homepage
+            for page in grouppages:
+                grouppagename = "%s/%s" % (username, page)
+                grouppage = Page(request, grouppagename)
+                if not grouppage.exists():
+                    text = """\
+#acl %(username)s:read,write,delete,revert
+ * %(username)s
+""" % locals()
+                    editor = PageEditor(request, grouppagename)
+                    editor._write_file(text)
+
+        parts = pagename.split('/')
+        if len(parts) == 2:
+            mainpage, subpage = parts
+            if subpage in grouppages and not self.admin(pagename):
+                return False
+
+        # No problem to save if my base class agrees
+        return Permissions.save(self, editor, newtext, rev, **kw)
+
+
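Note: per the usage section in the docstring, the wiki admin enables this policy in wikiconfig and maintains an AutoAdminGroup page listing users and <PageName>/AdminGroup pages that should get automatic admin rights. A sketch with hypothetical names; the import line matches the sample config change further down, and the class-body import is the usual way to bind SecurityPolicy as a Config attribute.

    # wikiconfig.py sketch (1.5-era layout assumed)
    from MoinMoin.multiconfig import DefaultConfig

    class Config(DefaultConfig):
        from MoinMoin.security.autoadmin import SecurityPolicy

    # Contents of the AutoAdminGroup wiki page (hypothetical entries),
    # ACL-protected and maintained by the wiki admin:
    #  * ThomasWaldmann          (admin rights on ThomasWaldmann and ThomasWaldmann/*)
    #  * SomeProject/AdminGroup  (its members get admin on SomeProject and SomeProject/*)
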
--- a/MoinMoin/util/antispam.py	Mon Jun 05 17:52:35 2006 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,245 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: iso-8859-1 -*-
-"""
-    This implements a global (and a local) blacklist against wiki spammers.
-
-    If started from commandline, it prints a merged list (moinmaster + MT) on
-    stdout, and what it got additionally from MT on stderr.
-    
-    @copyright: 2005 by Thomas Waldmann
-    @license: GNU GPL, see COPYING for details
-"""
-
-# give some log entries to stderr
-debug = 1
-
-import re, sys, time, datetime
-import sets
-
-if __name__ == '__main__':
-    sys.path.insert(0, "../..")
-
-from MoinMoin.security import Permissions
-from MoinMoin import caching, wikiutil
-
-# Errors ---------------------------------------------------------------
-
-class Error(Exception):
-    """Base class for antispam errors."""
-
-    def __str__(self):
-        return repr(self)
-
-class WikirpcError(Error):
-    """ Raised when we get xmlrpclib.Fault """
-
-    def __init__(self, msg, fault):
-        """ Init with msg and xmlrpclib.Fault dict """
-        self.msg = msg
-        self.fault = fault
-
-    def __str__(self):
-        """ Format the using description and data from the fault """
-        return self.msg + ": [%(faultCode)s]  %(faultString)s" % self.fault
-
-
-# Functions ------------------------------------------------------------
-
-def dprint(s):
-    if debug:
-        if isinstance(s, unicode):
-            s = s.encode('utf-8')
-        sys.stderr.write('%s\n' % s)
-
-
-def makelist(text):
-    """ Split text into lines, strip them, skip # comments """
-    lines = text.splitlines()
-    list = []
-    for line in lines:
-        line = line.split(' # ', 1)[0] # rest of line comment
-        line = line.strip()
-        if line and not line.startswith('#'):
-            list.append(line)
-    return list
-
-
-def getblacklist(request, pagename, do_update):
-    """ Get blacklist, possibly downloading new copy
-
-    @param request: current request (request instance)
-    @param pagename: bad content page name (unicode)
-    @rtype: list
-    @return: list of blacklisted regular expressions
-    """
-    from MoinMoin.PageEditor import PageEditor
-    p = PageEditor(request, pagename, uid_override="Antispam subsystem")
-    invalidate_cache = False
-    if do_update:
-        tooold = time.time() - 3600
-        mymtime = wikiutil.version2timestamp(p.mtime_usecs())
-        failure = caching.CacheEntry(request, "antispam", "failure", scope='wiki')
-        fail_time = failure.mtime() # only update if no failure in last hour
-        if (mymtime < tooold) and (fail_time < tooold):
-            dprint("%d *BadContent too old, have to check for an update..." % tooold)
-            import xmlrpclib
-            import socket
-
-            timeout = 15 # time out for reaching the master server via xmlrpc
-            old_timeout = socket.getdefaulttimeout()
-            socket.setdefaulttimeout(timeout)
-            
-            # For production code
-            uri = "http://moinmaster.wikiwikiweb.de:8000/?action=xmlrpc2"
-            # For testing (use your test wiki as BadContent source)
-            ##uri = "http://localhost/main/?action=xmlrpc2")
-            master = xmlrpclib.ServerProxy(uri)
-
-            try:
-                # Get BadContent info
-                master.putClientInfo('ANTISPAM-CHECK',
-                                     request.http_host+request.script_name)
-                response = master.getPageInfo(pagename)
-
-                # It seems that response is always a dict
-                if isinstance(response, dict) and 'faultCode' in response:
-                    raise WikirpcError("failed to get BadContent information",
-                                       response)
-                
-                # Compare date against local BadContent copy
-                masterdate = response['lastModified']
-
-                if isinstance(masterdate, datetime.datetime): 
-                    # for python 2.5a
-                    mydate = datetime.datetime(*tuple(time.gmtime(mymtime))[0:6])
-                else:
-                    # for python <= 2.4.x
-                    mydate = xmlrpclib.DateTime(tuple(time.gmtime(mymtime)))
-                                                    
-                dprint("master: %s mine: %s" % (masterdate, mydate))
-                if mydate < masterdate:
-                    # Get new copy and save
-                    dprint("Fetching page from master...")
-                    master.putClientInfo('ANTISPAM-FETCH',
-                                         request.http_host + request.script_name)
-                    response = master.getPage(pagename)
-                    if isinstance(response, dict) and 'faultCode' in response:
-                        raise WikirpcError("failed to get BadContent data",
-                                           response)
-                    p._write_file(response)
-
-                invalidate_cache = True
-
-            except (socket.error, xmlrpclib.ProtocolError), err:
-                # Log the error
-                # TODO: check if this does not fill the logs!
-                dprint('Timeout / socket / protocol error when accessing'
-                       ' moinmaster: %s' % str(err))
-                # update cache to wait before the next try
-                failure.update("")
-
-            except Error, err:
-                # In case of Error, we log the error and use the local
-                # BadContent copy.
-                dprint(str(err))
-
-            # set back socket timeout
-            socket.setdefaulttimeout(old_timeout)
-                
-    blacklist = p.get_raw_body()
-    return invalidate_cache, makelist(blacklist)
-
-
-class SecurityPolicy(Permissions):
-    """ Extend the default security policy with antispam feature """
-    
-    def save(self, editor, newtext, rev, **kw):
-        BLACKLISTPAGES = ["BadContent", "LocalBadContent"]
-        if not editor.page_name in BLACKLISTPAGES:
-            request = editor.request
-
-            # Start timing of antispam operation
-            request.clock.start('antispam')
-            
-            blacklist = []
-            invalidate_cache = not getattr(request.cfg, "_mmblcache", None)
-            for pn in BLACKLISTPAGES:
-                do_update = (pn != "LocalBadContent")
-                invalidate_cache_necessary, blacklist_entries = getblacklist(request, pn, do_update)
-                blacklist += blacklist_entries
-                invalidate_cache |= invalidate_cache_necessary
-
-            if blacklist:
-                if invalidate_cache:
-                    mmblcache = []
-                    for blacklist_re in blacklist:
-                        try:
-                            mmblcache.append(re.compile(blacklist_re, re.I))
-                        except re.error, err:
-                            dprint("Error in regex '%s': %s. Please check the pages %s." % (blacklist_re, str(err), ', '.join(BLACKLISTPAGES)))
-                            continue
-                    request.cfg._mmblcache = mmblcache
-
-                from MoinMoin.Page import Page
-
-                oldtext = ""
-                if rev > 0: # rev is the revision of the old page
-                    page = Page(request, editor.page_name, rev=rev)
-                    oldtext = page.get_raw_body()
-
-                newset = sets.ImmutableSet(newtext.splitlines(1))
-                oldset = sets.ImmutableSet(oldtext.splitlines(1))
-                difference = newset.difference(oldset)
-                addedtext = ''.join(difference) 
-                
-                for blacklist_re in request.cfg._mmblcache:
-                    match = blacklist_re.search(addedtext)
-                    if match:
-                        # Log error and raise SaveError, PageEditor
-                        # should handle this.
-                        _ = editor.request.getText
-                        msg = _('Sorry, can not save page because "%(content)s"'
-                                ' is not allowed in this wiki.') % {
-                            'content': match.group()
-                            }
-                        dprint(msg)
-                        raise editor.SaveError(msg)
-            request.clock.stop('antispam')
-            
-        # No problem to save if my base class agree
-        return Permissions.save(self, editor, newtext, rev, **kw)
-
-
-def main():
-    """ Fetch spammer patterns from MT blacklist and moinmaster and merge them.
-        A complete new list for moinmaster gets printed to stdout,
-        only the new entries are printed to stderr.
-    """
-    import urllib
-    mtbl = urllib.urlopen("http://www.jayallen.org/comment_spam/blacklist.txt").read()
-    mmbl = urllib.urlopen("http://moinmaster.wikiwikiweb.de/BadContent?action=raw").read()
-    mtbl = makelist(mtbl)
-    mmbl = makelist(mmbl)
-    print "#format plain"
-    print "#acl All:read"
-    newbl = []
-    for i in mtbl:
-        for j in mmbl:
-            match = re.search(j, i, re.I)
-            if match:
-                break
-        if not match and i not in mmbl:
-            print >>sys.stderr, "%s" % i
-            newbl.append(i)
-    bl = mmbl + newbl
-    bl.sort()
-    lasti = None
-    for i in bl:
-        if i != lasti:
-            print i
-            lasti = i
-
-if __name__ == '__main__':
-    main()
-
-
--- a/MoinMoin/util/autoadmin.py	Mon Jun 05 17:52:35 2006 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,113 +0,0 @@
-# -*- coding: iso-8859-1 -*-
-"""
-    MoinMoin - SecurityPolicy implementing auto admin rights for some users and some groups.
-    
-    AutoAdminGroup page contains users which automatically get admin rights
-    on their homepage and subpages of it. E.g. if ThomasWaldmann is in
-    AutoAdminGroup (or in a group contained in AutoAdminGroup), he gets
-    admin rights on pages ThomasWaldmann and ThomasWaldmann/*.
-
-    AutoAdminGroup page also contains groups which members automatically get
-    admin rights on the group's basename.
-    E.g. if SomeProject/AdminGroup is in AutoAdminGroup and ThomasWaldmann is
-    in SomeProject/AdminGroup, then ThomasWaldmann gets admin rights on pages
-    SomeProject and SomeProject/*.
-    
-    Further, it can autocreate the UserName/XxxxGroup (see grouppages var) when
-    a user save his homepage. Alternatively, this could be also done manually by
-    the user using *Template pages.
-
-    Usage (for wiki admin):
-     * Create an AutoAdminGroup page. If you don't know better, create an empty
-       page for starting.
-     * Enabling a home page for AutoAdmin: just add the user name to the
-       AutoAdminGroup page. After that, this user can create or change ACLs on
-       his homepage or subpages of it.
-     * Enabling another (project) page for AutoAdmin: add <PageName>/AdminGroup
-       to AutoAdminGroup. Also create that <PageName>/AdminGroup page and add
-       at least one user or one group to that page, enabling him or them to
-       create or change ACLs on <PageName> or subpages of it.
-     Those pages edited by wiki admin should be ACL protected with write access
-     limited to allowed people. They are used as source for some ACL
-     information and thus should be treated like the ACLs they get fed into.
-
-    Usage (for homepage owners):
-     * see if there is a HomepageTemplate with a prepared ACL line and some
-       other magic already on it. It is a good idea to have your homepage
-       read- and writeable for everybody as a means of open communication.
-       
-     * For creating personal (or private) subpages of your homepage, use the
-       ReadWritePageTemplate, ReadPageTemplate or PrivatePageTemplate.
-       They usually have some prepared ACL line on them, e.g.:
-       #acl @ME@/ReadWriteGroup:read,write @ME@/ReadGroup:read
-       That @ME@ from the template will be expanded to your name when saving,
-       thus using those 2 subpages (YourName/ReadWriteGroup and
-       YourName/ReadGroup) for allowing read/write or read-only access to
-       Now you only have to maintain 2 subpages (maybe they even have been
-       auto- created for you)
-     
-    Usage (for project people):
-     * see if there is some <ProjectName>Template with a prepared ACL line for
-       your project pages and use it for creating new subpages.
-       Use <ProjectName>/ReadWriteGroup and /ReadGroup etc. as you would do for
-       a homepage (see above).
-
-    @copyright: (c) Bastian Blank, Florian Festi, Thomas Waldmann
-    @license: GNU GPL, see COPYING for details.
-"""
-
-grouppage_autocreate = False # autocreate the group pages - alternatively use templates
-grouppages = ['AdminGroup', 'ReadGroup', 'ReadWriteGroup', ] # names of the subpages defining ACL groups
-
-from MoinMoin.security import Permissions
-from MoinMoin.Page import Page
-from MoinMoin.PageEditor import PageEditor
-
-class SecurityPolicy(Permissions):
-    """ Extend the default security policy with autoadmin feature """
-    
-    def admin(self, pagename):
-        try:
-            request = self.request
-            has_member = request.dicts.has_member
-            username = request.user.name
-            pagename = request.page.page_name
-            mainpage = pagename.split('/')[0]
-            if username == mainpage and has_member('AutoAdminGroup', username):
-                return True
-            groupname = "%s/AdminGroup" % mainpage
-            if has_member(groupname, username) and has_member('AutoAdminGroup', groupname):
-                return True
-        except AttributeError:
-            pass # when we get called from xmlrpc, there is no request.page
-        return Permissions.__getattr__(self, 'admin')(pagename)
-
-    def save(self, editor, newtext, rev, **kw):
-        request = self.request
-        has_member = request.dicts.has_member
-        username = request.user.name
-        pagename = editor.page_name
-
-        if grouppage_autocreate and username == pagename:
-            # create group pages when a user saves his own homepage
-            for page in grouppages:
-                grouppagename = "%s/%s" % (username, page)
-                grouppage = Page(request, grouppagename)
-                if not grouppage.exists():
-                    text = """\
-#acl %(username)s:read,write,delete,revert
- * %(username)s
-""" % locals()
-                    editor = PageEditor(request, grouppagename)
-                    editor._write_file(text)
-
-        parts = pagename.split('/')
-        if len(parts) == 2:
-            mainpage, subpage = parts
-            if subpage in grouppages and not self.admin(pagename):
-                return False
-
-        # No problem to save if my base class agrees
-        return Permissions.save(self, editor, newtext, rev, **kw)
-
-
--- a/docs/CHANGES	Mon Jun 05 17:52:35 2006 +0200
+++ b/docs/CHANGES	Tue Jun 06 21:39:16 2006 +0200
@@ -56,6 +56,9 @@
       WSGI not
       FCGI not
       TWISTED not
+    * moved util/antispam.py to security/antispam.py,
+      moved util/autoadmin.py to security/autoadmin.py,
+      moved security.py to security/__init__.py
     * added wikiutil.MimeType class (works internally with sanitized mime
       types because the official ones suck)
     * renamed parsers to module names representing sane mimetypes, e.g.:
@@ -660,7 +663,7 @@
        did not allow attachments, you now have to use:
        actions_excluded = ['AttachFile']
      * special users (All, Known, Trusted) in Groups are now supported
-     * MoinMoin.util.autoadmin SecurityPolicy added
+     * MoinMoin.security.autoadmin SecurityPolicy added
        When using this security policy, a user will get admin rights on his
        homepage (where pagename == username) and its sub pages. This is needed
        for the MyPages action, but can also get used for manual ACL changes.
@@ -1708,7 +1711,7 @@
       Nevertheless it is a very good idea to use a non-broken and more secure
       browser like Mozilla, Firefox or Opera!
 
-    * from MoinMoin.util.antispam import SecurityPolicy in your
+    * from MoinMoin.security.antispam import SecurityPolicy in your
       moin_config.py will protect your wiki from at least the known spammers.
       See MoinMoin:AntiSpamGlobalSolution for details.
 
--- a/setup.py	Mon Jun 05 17:52:35 2006 +0200
+++ b/setup.py	Tue Jun 06 21:39:16 2006 +0200
@@ -218,6 +218,7 @@
         'MoinMoin.script.old',
         'MoinMoin.script.old.migration',
         'MoinMoin.script.old.xmlrpc-tools',
+        'MoinMoin.security',
         'MoinMoin.server',
         'MoinMoin.stats',
         'MoinMoin.support',
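Note: for wiki admins, the only migration this move requires is the import path of the chosen SecurityPolicy; the three configuration files below are updated accordingly. In short:

    # old import (pre-move), as found in existing wikiconfig/farmconfig files:
    #from MoinMoin.util.antispam import SecurityPolicy
    #from MoinMoin.util.autoadmin import SecurityPolicy

    # new import after this changeset (uncomment the one policy you use):
    #from MoinMoin.security.antispam import SecurityPolicy
    #from MoinMoin.security.autoadmin import SecurityPolicy
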
--- a/wiki/config/more_samples/ldap_smb_farmconfig.py	Mon Jun 05 17:52:35 2006 +0200
+++ b/wiki/config/more_samples/ldap_smb_farmconfig.py	Tue Jun 06 21:39:16 2006 +0200
@@ -180,7 +180,7 @@
 
     # Link spam protection for public wikis (uncomment to enable).
     # Needs a reliable internet connection.
-    from MoinMoin.util.autoadmin import SecurityPolicy
+    from MoinMoin.security.autoadmin import SecurityPolicy
 
 
     # Mail --------------------------------------------------------------
--- a/wiki/config/wikiconfig.py	Mon Jun 05 17:52:35 2006 +0200
+++ b/wiki/config/wikiconfig.py	Tue Jun 06 21:39:16 2006 +0200
@@ -92,7 +92,7 @@
 
     # Link spam protection for public wikis (Uncomment to enable)
     # Needs a reliable internet connection.
-    #from MoinMoin.util.antispam import SecurityPolicy
+    #from MoinMoin.security.antispam import SecurityPolicy
 
 
     # Mail --------------------------------------------------------------
--- a/wiki/config/wikifarm/farmconfig.py	Mon Jun 05 17:52:35 2006 +0200
+++ b/wiki/config/wikifarm/farmconfig.py	Tue Jun 06 21:39:16 2006 +0200
@@ -109,7 +109,7 @@
 
     # Link spam protection for public wikis (uncomment to enable).
     # Needs a reliable internet connection.
-    #from MoinMoin.util.antispam import SecurityPolicy
+    #from MoinMoin.security.antispam import SecurityPolicy
 
 
     # Mail --------------------------------------------------------------