changeset 505:c6ab1f202891

moin maint mkpagepacks, moin maint globaledit imported from: moin--main--1.5--patch-509
author Thomas Waldmann <tw@waldmann-edv.de>
date Sun, 26 Mar 2006 16:19:13 +0000
parents 67d5daa9e872
children 24463c96f276
files ChangeLog Makefile MoinMoin/script/maint/globaledit.py MoinMoin/script/maint/mkpagepacks.py MoinMoin/script/old/globaledit.py MoinMoin/script/old/packages/__init__.py MoinMoin/script/old/packages/create_pagepacks.py wiki/underlay.tar.bz2
diffstat 8 files changed, 315 insertions(+), 320 deletions(-)
--- a/ChangeLog	Sun Mar 26 13:30:19 2006 +0000
+++ b/ChangeLog	Sun Mar 26 16:19:13 2006 +0000
@@ -2,6 +2,40 @@
 # arch-tag: automatic-ChangeLog--arch@arch.thinkmo.de--2003-archives/moin--main--1.5
 #
 
+2006-03-26 17:19:13 GMT	Thomas Waldmann <tw@waldmann-edv.de>	patch-509
+
+    Summary:
+      moin maint mkpagepacks, moin maint globaledit
+    Revision:
+      moin--main--1.5--patch-509
+
+    moin maint mkpagepacks, moin maint globaledit
+    
+
+    removed files:
+     MoinMoin/script/old/packages/.arch-ids/=id
+     MoinMoin/script/old/packages/.arch-ids/__init__.py.id
+     MoinMoin/script/old/packages/__init__.py
+
+    modified files:
+     ChangeLog Makefile MoinMoin/script/maint/globaledit.py
+     MoinMoin/script/maint/mkpagepacks.py wiki/underlay.tar.bz2
+
+    renamed files:
+     MoinMoin/script/old/.arch-ids/globaledit.py.id
+       ==> MoinMoin/script/maint/.arch-ids/globaledit.py.id
+     MoinMoin/script/old/globaledit.py
+       ==> MoinMoin/script/maint/globaledit.py
+     MoinMoin/script/old/packages/.arch-ids/create_pagepacks.py.id
+       ==> MoinMoin/script/maint/.arch-ids/mkpagepacks.py.id
+     MoinMoin/script/old/packages/create_pagepacks.py
+       ==> MoinMoin/script/maint/mkpagepacks.py
+
+    removed directories:
+     MoinMoin/script/old/packages
+     MoinMoin/script/old/packages/.arch-ids
+
+
 2006-03-26 14:30:19 GMT	Thomas Waldmann <tw@waldmann-edv.de>	patch-508
 
     Summary:
--- a/Makefile	Sun Mar 26 13:30:19 2006 +0000
+++ b/Makefile	Sun Mar 26 16:19:13 2006 +0000
@@ -48,7 +48,7 @@
 
 pagepacks:
 	@python tests/maketestwiki.py
-	@python MoinMoin/scripts/packages/create_pagepacks.py
+	@MoinMoin/script/moin.py maint mkpagepacks # must NOT use --config-dir or --wiki-url here!
 	cd $(share) ; rm -rf underlay
 	cp -a $(testwiki)/underlay $(share)/
 	
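For context (not part of the changeset): the pagepacks target above now builds the local test wiki and then invokes the new maint plugin through moin.py, and the comment stresses that neither --config-dir nor --wiki-url may be passed, because mkpagepacks refuses to run unless it detects the local testwiki. A rough, hypothetical Python sketch of the same sequence, assuming the test wiki lives in testwiki/ and the paths used in the Makefile:

    # Hypothetical sketch (not part of the changeset) mirroring the Makefile
    # target above; paths and the share_dir parameter are assumptions.
    import os
    import shutil
    import subprocess

    def build_pagepacks(share_dir):
        # Create/refresh the local test wiki the maint script requires.
        subprocess.call(["python", "tests/maketestwiki.py"])
        # Run the new plugin; no --config-dir / --wiki-url, so it stays on the testwiki.
        subprocess.call(["python", "MoinMoin/script/moin.py", "maint", "mkpagepacks"])
        # Replace the shipped underlay with the freshly generated one.
        target = os.path.join(share_dir, "underlay")
        if os.path.exists(target):
            shutil.rmtree(target)
        shutil.copytree(os.path.join("testwiki", "underlay"), target)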
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/maint/globaledit.py	Sun Mar 26 16:19:13 2006 +0000
@@ -0,0 +1,109 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - do global changes to all pages in a wiki.
+
+    The changes being done are hardcoded in function do_edit.
+    As it is, this script is mostly useful for the MoinMoin release maintainer
+    using: moin ... --wiki-url=moinmaster.wikiwikiweb.de/ maint globaledit
+    
+    @copyright: 2004-2006 by MoinMoin:ThomasWaldmann
+    @license: GNU GPL, see COPYING for details.
+"""
+debug = False
+
+from MoinMoin import PageEditor
+from MoinMoin.script._util import MoinScript
+
+class PluginScript(MoinScript):
+    def __init__(self, argv, def_values):
+        MoinScript.__init__(self, argv, def_values)
+    
+    def do_edit(self, pagename, origtext):
+        if pagename in ['LocalSpellingWords', 'LocalBadContent',] or pagename.endswith('Template'):
+            return origtext
+        language_line = format_line = masterpage = None
+        acl_lines = []
+        master_lines = []
+        pragma_lines = []
+        comment_lines = []
+        content_lines = []
+        lines = origtext.splitlines()
+        header = True
+        for l in lines:
+            if not l.startswith('#'):
+                header = False
+            if header:
+                if l.startswith('#acl '):
+                    acl_lines.append(l)
+                elif l.startswith('#language '):
+                    language_line = l
+                elif l.startswith('#format '):
+                    format_line = l
+                elif l.startswith('##master-page:'):
+                    masterpage = l.split(':',1)[1].strip()
+                    master_lines.append(l)
+                elif l.startswith('##master-date:'):
+                    master_lines.append(l)
+                elif l.startswith('##'):
+                    comment_lines.append(l)
+                elif l.startswith('#'):
+                    pragma_lines.append(l)
+            else:
+                content_lines.append(l)
+
+        if not language_line:
+            language_line = '#language en'
+        if not format_line:
+            format_line = '#format wiki'
+        if not acl_lines and (
+            masterpage is None or masterpage not in ['FrontPage', 'WikiSandBox',] and not masterpage.endswith('Template')):
+            acl_lines = ['#acl MoinPagesEditorGroup:read,write,delete,revert All:read']
+        if not master_lines:
+            master_lines = ['##master-page:Unknown-Page', '##master-date:Unknown-Date',]
+
+        c1old = "## Please edit (or translate) system/help pages on the moinmaster wiki ONLY."
+        c2old = "## For more information, please see MoinMaster:MoinPagesEditorGroup."
+        c1 = "## Please edit system and help pages ONLY in the moinmaster wiki! For more"
+        c2 = "## information, please see MoinMaster:MoinPagesEditorGroup."
+        for c in (c1old, c2old, c1, c2):
+            if c in comment_lines:
+                comment_lines.remove(c)
+            
+        comment_lines = [c1, c2, ] + comment_lines
+
+        if content_lines and content_lines[-1].strip(): # not an empty line at EOF
+            content_lines.append('')
+
+        if masterpage and masterpage.endswith('Template'):
+            changedtext = master_lines + [format_line, language_line,] + pragma_lines + content_lines
+        else:
+            changedtext = comment_lines + master_lines + acl_lines + [format_line, language_line,] + pragma_lines + content_lines
+        changedtext = '\n'.join(changedtext)
+        return changedtext
+
+    def mainloop(self):
+        if debug:
+            import codecs
+            origtext = codecs.open('origtext', 'r', 'utf-8').read()
+            origtext = origtext.replace('\r\n','\n')
+            changedtext = self.do_edit("", origtext)
+            changedtext = changedtext.replace('\n','\r\n')
+            f = codecs.open('changedtext', 'w', 'utf-8')
+            f.write(changedtext)
+            f.close()
+        else:
+            self.init_request()
+            request = self.request
+
+            # Get all existing pages in the wiki
+            pagelist = request.rootpage.getPageList(user='')
+
+            for pagename in pagelist:
+                #request = RequestCLI(url=url, pagename=pagename.encode('utf-8'))
+                p = PageEditor.PageEditor(request, pagename, do_editor_backup=0)
+                origtext = p.get_raw_body()
+                changedtext = self.do_edit(pagename, origtext)
+                if changedtext and changedtext != origtext:
+                    print "Writing %s ..." % repr(pagename)
+                    p._write_file(changedtext)
+
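Illustration (not part of the changeset): do_edit() above rebuilds the processing-instruction header of every page. A hedged sketch of the before/after it produces for an ordinary page, assuming no Template master page and no pre-existing #acl line; the sample page text is invented purely for demonstration:

    # Sketch only: the kind of rewrite do_edit() applies; the sample text is invented.
    sample_in = "\n".join([
        "## a local comment",
        "#pragma keywords example",
        "#format wiki",
        "Some page content.",
    ])

    # Expected result of do_edit("SomePage", sample_in): the two standard
    # moinmaster comments lead, followed by existing comments, master-page
    # placeholders, a default #acl, #format, #language (defaulted to en),
    # remaining pragmas, the content, and a trailing empty line.
    sample_out = "\n".join([
        "## Please edit system and help pages ONLY in the moinmaster wiki! For more",
        "## information, please see MoinMaster:MoinPagesEditorGroup.",
        "## a local comment",
        "##master-page:Unknown-Page",
        "##master-date:Unknown-Date",
        "#acl MoinPagesEditorGroup:read,write,delete,revert All:read",
        "#format wiki",
        "#language en",
        "#pragma keywords example",
        "Some page content.",
        "",
    ])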
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/maint/mkpagepacks.py	Sun Mar 26 16:19:13 2006 +0000
@@ -0,0 +1,171 @@
+# -*- coding: iso-8859-1 -*-
+"""
+    MoinMoin - Package Generator
+
+    @copyright: 2005 by Alexander Schremmer,
+                2006 by MoinMoin:ThomasWaldmann
+    @license: GNU GPL, see COPYING for details.
+"""
+
+import os
+import zipfile
+from sets import Set
+from datetime import datetime
+
+from MoinMoin import wikidicts, wikiutil
+from MoinMoin.Page import Page
+from MoinMoin.PageEditor import PageEditor
+from MoinMoin.packages import packLine, unpackLine, MOIN_PACKAGE_FILE
+from MoinMoin.script._util import MoinScript
+
+EXTRA = u'extra'
+NODIST = u'nodist'
+ALL = u'all_languages'
+COMPRESSION_LEVEL = zipfile.ZIP_STORED
+
+class PluginScript(MoinScript):
+    def __init__(self, argv, def_values):
+        MoinScript.__init__(self, argv, def_values)
+
+    def buildPageSets(self):
+        """ Calculates which pages should go into which package. """
+        request = self.request
+        pageSets = {}
+
+        allPages = Set(request.rootpage.getPageList())
+
+        systemPages = wikidicts.Group(request, "SystemPagesGroup").members()
+
+        for pagename in systemPages:
+            if pagename.endswith("Group"):
+                #print x + " -> " + repr(wikidicts.Group(request, x).members())
+                self.gd.addgroup(request, pagename)
+
+        langPages = Set()
+        for name, group in self.gd.dictdict.items():
+            group.expandgroups(self.gd)
+            groupPages = Set(group.members() + [name])
+            name = name.replace("SystemPagesIn", "").replace("Group", "")
+            pageSets[name] = groupPages
+            langPages |= groupPages
+
+        specialPages = Set(["SystemPagesGroup"])
+
+        masterNonSystemPages = allPages - langPages - specialPages
+
+        moinI18nPages = Set([x for x in masterNonSystemPages if x.startswith("MoinI18n")])
+        
+        nodistPages = moinI18nPages | Set(["InterWikiMap", ])
+
+        extraPages = masterNonSystemPages - nodistPages
+
+        pageSets[ALL] = langPages
+        
+        for name in pageSets.keys():
+            if name not in (u"English"):
+                pageSets[name] -= pageSets[u"English"]
+                pageSets[name] -= nodistPages
+
+        pageSets[EXTRA] = extraPages   # stuff that maybe should be in some language group
+        pageSets[NODIST] = nodistPages # we dont want to have them in dist archive
+        return pageSets
+
+    def packagePages(self, pagelist, filename, function):
+        """ Puts pages from pagelist into filename and calls function on them on installation. """
+        request = self.request
+        try:
+            os.remove(filename)
+        except OSError:
+            pass
+        zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)
+
+        cnt = 0
+        script = [packLine(['MoinMoinPackage', '1']), ]
+                  
+        for pagename in pagelist:
+            pagename = pagename.strip()
+            page = Page(request, pagename)
+            if page.exists():
+                cnt += 1
+                script.append(packLine([function, str(cnt), pagename]))
+                timestamp = wikiutil.version2timestamp(page.mtime_usecs())
+                zi = zipfile.ZipInfo(filename=str(cnt), date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
+                zi.compress_type = COMPRESSION_LEVEL
+                zf.writestr(zi, page.get_raw_body().encode("utf-8"))
+            else:
+                #import sys
+                #print >>sys.stderr, "Could not find the page %s." % pagename.encode("utf-8")
+                pass
+
+        script += [packLine(['Print', 'Installed MoinMaster page bundle %s.' % os.path.basename(filename)])]
+
+        zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
+        zf.close()
+
+    def removePages(self, pagelist):
+        """ Pages from pagelist get removed from the underlay directory. """
+        request = self.request
+        import shutil
+        for pagename in pagelist:
+            pagename = pagename.strip()
+            page = Page(request, pagename)
+            try:
+                underlay, path = page.getPageBasePath(-1)
+                shutil.rmtree(path)
+            except:
+                pass
+
+    def packageCompoundInstaller(self, bundledict, filename):
+        """ Creates a package which installs all other packages. """
+        try:
+            os.remove(filename)
+        except OSError:
+            pass
+        zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)
+
+        script = [packLine(['MoinMoinPackage', '1']), ]
+
+        script += [packLine(["InstallPackage", "SystemPagesSetup", name + ".zip"])
+                   for name in bundledict.keys() if name not in (NODIST, EXTRA, ALL, u"English")]
+        script += [packLine(['Print', 'Installed all MoinMaster page bundles.'])]
+
+        zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
+        zf.close()
+
+    def mainloop(self):
+        # self.options.wiki_url = 'localhost/'
+        if self.options.wiki_url and '.' in self.options.wiki_url:
+            print "NEVER EVER RUN THIS ON A REAL WIKI!!! This must be run on a local testwiki."
+            return
+        if self.options.config_dir:
+            print "NEVER EVER RUN THIS ON A REAL WIKI!!! This must be run on a local testwiki without any --config-dir!"
+            return
+            
+        self.init_request() # this request will work on a test wiki in testwiki/ directory
+                            # we assume that there are current moinmaster pages there
+        request = self.request
+        request.form = request.args = request.setup_args()
+
+        if not ('testwiki' in request.cfg.data_dir and 'testwiki' in request.cfg.data_underlay_dir):
+            print "NEVER EVER RUN THIS ON A REAL WIKI!!! This must be run on a local testwiki."
+            return
+            
+        self.gd = wikidicts.GroupDict(request)
+        self.gd.reset()
+
+        print "Building page sets ..."
+        pageSets = self.buildPageSets()
+
+        print "Creating packages ..."
+        generate_filename = lambda name: os.path.join('testwiki', 'underlay', 'pages', 'SystemPagesSetup', 'attachments', '%s.zip' % name)
+
+        self.packageCompoundInstaller(pageSets, generate_filename(ALL))
+
+        [self.packagePages(list(pages), generate_filename(name), "ReplaceUnderlay") 
+            for name, pages in pageSets.items() if not name in (u'English', ALL, NODIST)]
+
+        [self.removePages(list(pages)) 
+            for name, pages in pageSets.items() if not name in (u'English', ALL)]
+
+        print "Finished."
+
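For reference (not part of the changeset): each zip written by packagePages() above stores the page bodies under numeric member names and a package script (the MOIN_PACKAGE_FILE member) with one packLine()-encoded instruction per page. A minimal stdlib-only sketch for peeking inside such a pack; the helper name and the example path are hypothetical:

    # Hypothetical helper: inspect a page pack built by packagePages(),
    # using only the stdlib. Numeric members hold page bodies; the
    # remaining member is the package script.
    import zipfile

    def inspect_pagepack(path):
        zf = zipfile.ZipFile(path, "r")
        try:
            for name in zf.namelist():
                if name.isdigit():
                    body = zf.read(name).decode("utf-8")
                    print "page member %s: %d characters" % (name, len(body))
                else:
                    print "package script (%s):" % name
                    print zf.read(name)
        finally:
            zf.close()

    # e.g. (path is illustrative):
    # inspect_pagepack("testwiki/underlay/pages/SystemPagesSetup/attachments/English.zip")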
--- a/MoinMoin/script/old/globaledit.py	Sun Mar 26 13:30:19 2006 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-"""
-    Script for doing global changes to all pages in a wiki.
-
-    You either need to have your wiki configs in sys.path or you
-    need to invoke this script from the same directory.
-
-    @copyright: 2004, Thomas Waldmann
-    @license: GPL licensed, see COPYING for details
-"""
-
-debug = False
-
-url = "moinmaster.wikiwikiweb.de/"
-
-import sys
-sys.path.insert(0, '/org/de.wikiwikiweb.moinmaster/bin15') # farmconfig/wikiconfig location
-sys.path.insert(0, '../..')
-
-def do_edit(pagename, origtext):
-    if pagename in ['LocalSpellingWords','LocalBadContent',] or pagename.endswith('Template'):
-        return origtext
-    language_line = format_line = masterpage = None
-    acl_lines = []
-    master_lines = []
-    pragma_lines = []
-    comment_lines = []
-    content_lines = []
-    lines = origtext.splitlines()
-    header = True
-    for l in lines:
-        if not l.startswith('#'):
-            header = False
-        if header:
-            if l.startswith('#acl '):
-                acl_lines.append(l)
-            elif l.startswith('#language '):
-                language_line = l
-            elif l.startswith('#format '):
-                format_line = l
-            elif l.startswith('##master-page:'):
-                masterpage = l.split(':',1)[1].strip()
-                master_lines.append(l)
-            elif l.startswith('##master-date:'):
-                master_lines.append(l)
-            elif l.startswith('##'):
-                comment_lines.append(l)
-            elif l.startswith('#'):
-                pragma_lines.append(l)
-        else:
-            content_lines.append(l)
-
-    if not language_line:
-        language_line = '#language en'
-    if not format_line:
-        format_line = '#format wiki'
-    if not acl_lines and (
-        masterpage is None or masterpage not in ['FrontPage', 'WikiSandBox',] and not masterpage.endswith('Template')):
-        acl_lines = ['#acl MoinPagesEditorGroup:read,write,delete,revert All:read']
-    if not master_lines:
-        master_lines = ['##master-page:Unknown-Page', '##master-date:Unknown-Date',]
-
-    c1old = "## Please edit (or translate) system/help pages on the moinmaster wiki ONLY."
-    c2old = "## For more information, please see MoinMaster:MoinPagesEditorGroup."
-    c1 = "## Please edit system and help pages ONLY in the moinmaster wiki! For more"
-    c2 = "## information, please see MoinMaster:MoinPagesEditorGroup."
-    for c in (c1old, c2old, c1, c2):
-        if c in comment_lines:
-            comment_lines.remove(c)
-        
-    comment_lines = [c1, c2, ] + comment_lines
-
-    if content_lines and content_lines[-1].strip(): # not an empty line at EOF
-        content_lines.append('')
-
-    if masterpage and masterpage.endswith('Template'):
-        changedtext = master_lines + [format_line, language_line,] + pragma_lines + content_lines
-    else:
-        changedtext = comment_lines + master_lines + acl_lines + [format_line, language_line,] + pragma_lines + content_lines
-    changedtext = '\n'.join(changedtext)
-    return changedtext
-
-if __name__ == '__main__':
-    if debug:
-        import codecs
-        origtext = codecs.open('origtext', 'r', 'utf-8').read()
-        origtext = origtext.replace('\r\n','\n')
-        changedtext = do_edit("", origtext)
-        changedtext = changedtext.replace('\n','\r\n')
-        f = codecs.open('changedtext', 'w', 'utf-8')
-        f.write(changedtext)
-        f.close()
-    else:
-
-        from MoinMoin import PageEditor, wikiutil
-        from MoinMoin.request import RequestCLI
-
-        request = RequestCLI(url=url)
-        # Get all existing pages in the wiki
-        pagelist = request.rootpage.getPageList(user='')
-
-        for pagename in pagelist:
-            request = RequestCLI(url=url, pagename=pagename.encode('utf-8'))
-            p = PageEditor.PageEditor(request, pagename, do_editor_backup=0)
-            origtext = p.get_raw_body()
-            changedtext = do_edit(pagename, origtext)
-            if changedtext and changedtext != origtext:
-                print "Writing %s ..." % repr(pagename)
-                p._write_file(changedtext)
-
--- a/MoinMoin/script/old/packages/__init__.py	Sun Mar 26 13:30:19 2006 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-# -*- coding: iso-8859-1 -*-
-"""
-    MoinMoin - create language packages
-
-    @copyright: 2005 by Thomas Waldmann (MoinMoin:ThomasWaldmann)
-    @license: GNU GPL, see COPYING for details.
-"""
--- a/MoinMoin/script/old/packages/create_pagepacks.py	Sun Mar 26 13:30:19 2006 +0000
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,202 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: iso-8859-1 -*-
-"""
-    MoinMoin - Package Generator
-
-    @copyright: 2005 by Alexander Schremmer
-    @license: GNU GPL, see COPYING for details.
-"""
-
-import os, sys
-import zipfile
-import threading
-import xmlrpclib
-from sets import Set
-from datetime import datetime
-from time import sleep
-
-# your MoinMoin package path here
-sys.path.insert(0, r"../../..")
-sys.path.insert(0, r".")
-
-from MoinMoin import config, wikidicts, wikiutil
-from MoinMoin.Page import Page
-from MoinMoin.PageEditor import PageEditor
-from MoinMoin.request import RequestCLI
-from MoinMoin.packages import packLine, unpackLine, MOIN_PACKAGE_FILE
-
-master_url ="http://moinmaster.wikiwikiweb.de/?action=xmlrpc2"
-
-EXTRA = u'extra'
-NODIST = u'nodist'
-ALL = u'all_languages'
-COMPRESSION_LEVEL = zipfile.ZIP_STORED
-
-def buildPageSets():
-    """ Calculates which pages should go into which package. """
-    pageSets = {}
-
-    #allPages = Set(xmlrpclib.ServerProxy(master_url).getAllPages())
-    allPages = Set(request.rootpage.getPageList())
-
-    systemPages = wikidicts.Group(request, "SystemPagesGroup").members()
-
-    for pagename in systemPages:
-        if pagename.endswith("Group"):
-            #print x + " -> " + repr(wikidicts.Group(request, x).members())
-            gd.addgroup(request, pagename)
-
-    langPages = Set()
-    for name, group in gd.dictdict.items():
-        group.expandgroups(gd)
-        groupPages = Set(group.members() + [name])
-        name = name.replace("SystemPagesIn", "").replace("Group", "")
-        pageSets[name] = groupPages
-        langPages |= groupPages
-
-    specialPages = Set(["SystemPagesGroup"])
-
-    masterNonSystemPages = allPages - langPages - specialPages
-
-    moinI18nPages = Set([x for x in masterNonSystemPages if x.startswith("MoinI18n")])
-    
-    nodistPages = moinI18nPages | Set(["InterWikiMap", ])
-
-    extraPages = masterNonSystemPages - nodistPages
-
-    pageSets[ALL] = langPages
-    
-    for name in pageSets.keys():
-        if name not in (u"English"):
-            pageSets[name] -= pageSets[u"English"]
-            pageSets[name] -= nodistPages
-
-    pageSets[EXTRA] = extraPages   # stuff that maybe should be in some language group
-    pageSets[NODIST] = nodistPages # we dont want to have them in dist archive
-    return pageSets
-
-def packagePages(pagelist, filename, function):
-    """ Puts pages from pagelist into filename and calls function on them on installation. """
-    try:
-        os.remove(filename)
-    except OSError:
-        pass
-    zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)
-
-    cnt = 0
-    script = [packLine(['MoinMoinPackage', '1']),
-              ]
-
-    for pagename in pagelist:
-        pagename = pagename.strip()
-        page = Page(request, pagename)
-        if page.exists():
-            cnt += 1
-            script.append(packLine([function, str(cnt), pagename]))
-            timestamp = wikiutil.version2timestamp(page.mtime_usecs())
-            zi = zipfile.ZipInfo(filename=str(cnt), date_time=datetime.fromtimestamp(timestamp).timetuple()[:6])
-            zi.compress_type = COMPRESSION_LEVEL
-            zf.writestr(zi, page.get_raw_body().encode("utf-8"))
-        else:
-            #print >>sys.stderr, "Could not find the page %s." % pagename.encode("utf-8")
-            pass
-
-    script += [packLine(['Print', 'Installed MoinMaster page bundle %s.' % os.path.basename(filename)])]
-
-    zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
-    zf.close()
-
-def removePages(pagelist):
-    """ Pages from pagelist get removed from the underlay directory. """
-    import shutil
-    for pagename in pagelist:
-        pagename = pagename.strip()
-        page = Page(request, pagename)
-        try:
-            underlay, path = page.getPageBasePath(-1)
-            shutil.rmtree(path)
-        except:
-            pass
-
-def packageCompoundInstaller(bundledict, filename):
-    """ Creates a package which installs all other packages. """
-    try:
-        os.remove(filename)
-    except OSError:
-        pass
-    zf = zipfile.ZipFile(filename, "w", COMPRESSION_LEVEL)
-
-    script = [packLine(['MoinMoinPackage', '1']),
-              ]
-
-    script += [packLine(["InstallPackage", "SystemPagesSetup", name + ".zip"])
-               for name in bundledict.keys() if name not in (NODIST, EXTRA, ALL, u"English")]
-    script += [packLine(['Print', 'Installed all MoinMaster page bundles.'])]
-
-    zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
-    zf.close()
-
-def getMasterPages():
-    """ Leechezzz. """
-    master = xmlrpclib.ServerProxy(master_url)
-    maxThreads = 100
-
-    def downloadpage(wiki, pagename):
-        source = wiki.getPage(pagename)
-        if source.find("##master-page:FrontPage") != -1:
-            source += u"""\n\n||<tablestyle="background: lightyellow; width:100%; text-align:center">[[en]] If you want to add help pages in your favorite language, see '''SystemPagesSetup'''.||\n"""
-
-        PageEditor(request, pagename, uid_override="Fetching ...")._write_file(source)
-        #print "Fetched " + pagename.encode("utf-8")
-
-    stopped = []
-    running = []
-
-    print "Loading master page list ..."
-    pagelist = master.getAllPages()
-    print "Preparing threads ..."
-    for pagename in pagelist:
-        t = threading.Thread(target=downloadpage, args=(master, pagename), name=pagename.encode("unicode_escape"))
-        stopped.append(t)
-
-    print "Starting scheduler ..."
-    while len(running) > 0 or len(stopped) != 0:
-        for x in running:
-            if not x.isAlive():
-                #print "Found dead thread " + repr(x)
-                running.remove(x)
-        print "running %i| stopped %i" % (len(running), len(stopped))
-        for i in xrange(min(maxThreads - len(running), len(stopped))):
-            t = stopped.pop()
-            running.append(t)
-            t.start()
-            #print "Scheduled %s." % repr(t)
-        sleep(1)
-
-def run():
-    request = RequestCLI(url='localhost/')
-    request.form = request.args = request.setup_args()
-
-    gd = wikidicts.GroupDict(request)
-    gd.reset()
-
-    #getMasterPages()
-    print "Building page sets ..."
-    pageSets = buildPageSets()
-
-    print "Creating packages ..."
-    generate_filename = lambda name: os.path.join('testwiki', 'underlay', 'pages', 'SystemPagesSetup', 'attachments', '%s.zip' % name)
-
-    packageCompoundInstaller(pageSets, generate_filename(ALL))
-
-    [packagePages(list(pages), generate_filename(name), "ReplaceUnderlay") 
-        for name, pages in pageSets.items() if not name in (u'English', ALL, NODIST)]
-
-    [removePages(list(pages)) 
-        for name, pages in pageSets.items() if not name in (u'English', ALL)]
-
-    print "Finished."
-
-if __name__ == "__main__":
-    run()
-
Binary file wiki/underlay.tar.bz2 has changed