changeset 315:830cf8bdcf24

implemented 'make pagepacks', updated underlay imported from: moin--main--1.5--patch-319
author Thomas Waldmann <tw@waldmann-edv.de>
date Tue, 13 Dec 2005 22:54:31 +0000
parents 43c6bc5dc22c
children 14d0a26ab4ec
files Makefile MoinMoin/scripts/packages/create_pagepacks.py tests/maketestwiki.py tests/runtests.py wiki/underlay.tar.bz2
diffstat 5 files changed, 110 insertions(+), 65 deletions(-) [+]
line wrap: on
line diff
--- a/Makefile	Tue Dec 13 22:49:42 2005 +0000
+++ b/Makefile	Tue Dec 13 22:54:31 2005 +0000
@@ -8,13 +8,6 @@
 all:
 	python setup.py build
 
-dist:
-	-rm MANIFEST
-	python setup.py sdist
-
-pagepacks:
-	python MoinMoin/scripts/packages/create_pagepacks.py
-
 install-docs:
 	-mkdir build
 	wget -U MoinMoin/Makefile -O build/INSTALL.html "http://moinmaster.wikiwikiweb.de/MoinMoin/InstallDocs?action=print"
@@ -64,6 +57,16 @@
 	    $(share)/underlay/pages/MoinPagesEditorGroup/revisions/00000001
 	cd $(share); rm -rf underlay.tar.bz2; tar cjf underlay.tar.bz2 underlay
 
+pagepacks:
+	@python tests/maketestwiki.py
+	@python MoinMoin/scripts/packages/create_pagepacks.py
+	cd $(share) ; rm -rf underlay
+	cp -a $(testwiki)/underlay $(share)/
+	
+dist:
+	-rm MANIFEST
+	python setup.py sdist
+
 # Create patchlevel module
 patchlevel:
 	@echo -e patchlevel = "\"`tla logs | tail -n1`\"\n" >MoinMoin/patchlevel.py
@@ -87,6 +90,7 @@
 	tla star-merge arch@arch.thinkmo.de--2003-archives/moin--main--1.3
 
 test: 
+	@python tests/maketestwiki.py
 	@python tests/runtests.py
 
 clean: clean-testwiki clean-pyc
--- a/MoinMoin/scripts/packages/create_pagepacks.py	Tue Dec 13 22:49:42 2005 +0000
+++ b/MoinMoin/scripts/packages/create_pagepacks.py	Tue Dec 13 22:54:31 2005 +0000
@@ -28,6 +28,7 @@
 master_url ="http://moinmaster.wikiwikiweb.de/?action=xmlrpc2"
 
 EXTRA = u'extra'
+NODIST = u'nodist'
 ALL = u'all_languages'
 COMPRESSION_LEVEL = zipfile.ZIP_STORED
 
@@ -35,44 +36,45 @@
     """ Calculates which pages should go into which package. """
     pageSets = {}
 
-    allPages = Set()
-
-    masterSet = Set(xmlrpclib.ServerProxy(master_url).getAllPages())
+    allPages = Set(xmlrpclib.ServerProxy(master_url).getAllPages())
 
     systemPages = wikidicts.Group(request, "SystemPagesGroup").members()
 
-    specialPages = Set([x for x in systemPages if not x.endswith("Group")] + ["SystemPagesGroup"])
-
     for pagename in systemPages:
         if pagename.endswith("Group"):
             #print x + " -> " + repr(wikidicts.Group(request, x).members())
             gd.addgroup(request, pagename)
 
+    langPages = Set()
     for name, group in gd.dictdict.items():
         group.expandgroups(gd)
         groupPages = Set(group.members() + [name])
         name = name.replace("SystemPagesIn", "").replace("Group", "")
         pageSets[name] = groupPages
-        allPages |= groupPages
-
-    masterNonSystemPages = masterSet - allPages - specialPages
-
-    frontPagesEtc = Set([x for x in masterNonSystemPages if not (
-        x.startswith("MoinI18n") or x == "MoinPagesEditorGroup" or
-        x == "InterWikiMap"
-        )])
+        langPages |= groupPages
 
-    specialPages |= frontPagesEtc
-
-    pageSets[EXTRA] = specialPages
+    specialPages = Set(["SystemPagesGroup"])
 
-    pageSets[ALL] = allPages
+    masterNonSystemPages = allPages - langPages - specialPages
 
+    moinI18nPages = Set([x for x in masterNonSystemPages if x.startswith("MoinI18n")])
+    
+    nodistPages = moinI18nPages | Set([
+            "MoinPagesEditorGroup",
+            "InterWikiMap",
+            ])
+
+    extraPages = masterNonSystemPages - nodistPages
+
+    pageSets[ALL] = langPages
+    
     for name in pageSets.keys():
-        if name not in (EXTRA, u"English"):
-            pageSets[name] -= pageSets[EXTRA]
+        if name not in (u"English",):
             pageSets[name] -= pageSets[u"English"]
+            pageSets[name] -= nodistPages
 
+    pageSets[EXTRA] = extraPages   # stuff that maybe should be in some language group
+    pageSets[NODIST] = nodistPages # we dont want to have them in dist archive
     return pageSets
 
 def packagePages(pagelist, filename, function):
@@ -106,6 +108,18 @@
     zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
     zf.close()
 
+def removePages(pagelist):
+    """ Pages from pagelist get removed from the underlay directory. """
+    import shutil
+    for pagename in pagelist:
+        pagename = pagename.strip()
+        page = Page(request, pagename)
+        try:
+            underlay, path = page.getPageBasePath(-1)
+            shutil.rmtree(path)
+        except:
+            pass
+
 def packageCompoundInstaller(bundledict, filename):
     """ Creates a package which installs all other packages. """
     try:
@@ -118,7 +132,7 @@
               ]
 
     script += [packLine(["InstallPackage", "SystemPagesSetup", name + ".zip"])
-               for name in bundledict.keys() if name not in (EXTRA, ALL, u"English")]
+               for name in bundledict.keys() if name not in (NODIST, EXTRA, ALL, u"English")]
     script += [packLine(['Print', 'Installed all MoinMaster page bundles.'])]
 
     zf.writestr(MOIN_PACKAGE_FILE, u"\n".join(script).encode("utf-8"))
@@ -175,7 +189,12 @@
 generate_filename = lambda name: os.path.join('testwiki', 'underlay', 'pages', 'SystemPagesSetup', 'attachments', '%s.zip' % name)
 
 packageCompoundInstaller(pageSets, generate_filename(ALL))
-[packagePages(list(pages), generate_filename(name), "ReplaceUnderlay") for name, pages in pageSets.items() if name != ALL]
+
+[packagePages(list(pages), generate_filename(name), "ReplaceUnderlay") 
+    for name, pages in pageSets.items() if not name in (u'English', ALL, NODIST)]
+
+[removePages(list(pages)) 
+    for name, pages in pageSets.items() if not name in (u'English', ALL)]
 
 print "Finished."
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/maketestwiki.py	Tue Dec 13 22:54:31 2005 +0000
@@ -0,0 +1,58 @@
+# -*- coding: iso-8859-1 -*-
+"""
+MoinMoin - make a test wiki
+
+Usage:
+
+    maketestwiki.py
+
+@copyright: 2005 by Thomas Waldmann
+@license: GNU GPL, see COPYING for details.
+"""
+
+import os, sys, shutil, errno, tarfile
+
+moinpath = os.path.join(os.path.dirname(sys.argv[0]), os.pardir)
+sys.path.insert(0, os.path.abspath(moinpath))
+
+WIKI = os.path.abspath('testwiki')
+SHARE = os.path.abspath('wiki')
+
+def removeTestWiki():
+    print 'removing old wiki ...'
+    for dir in ['data', 'underlay']:
+        try:
+            shutil.rmtree(os.path.join(WIKI, dir))
+        except OSError, err:
+            if err.errno != errno.ENOENT:
+                raise
+
+def copyData():
+    print 'copying data ...'
+    src = os.path.join(SHARE, 'data')
+    dst = os.path.join(WIKI, 'data')
+    shutil.copytree(src, dst)
+    # Remove arch-ids dirs
+    for path, dirs, files in os.walk(dst):
+        for dir in dirs[:]:
+            if dir == '.arch-ids':
+                shutil.rmtree(os.path.join(path, dir))
+                dirs.remove(dir)
+
+
+def untarUnderlay():
+    print 'untaring underlay ...'
+    tar = tarfile.open(os.path.join(SHARE, 'underlay.tar.bz2'), mode='r:bz2')
+    for member in tar:
+        tar.extract(member, WIKI)
+    tar.close()
+
+
+def run():   
+    removeTestWiki()
+    copyData()
+    untarUnderlay()
+
+if __name__ == '__main__':
+    run()
+
--- a/tests/runtests.py	Tue Dec 13 22:49:42 2005 +0000
+++ b/tests/runtests.py	Tue Dec 13 22:54:31 2005 +0000
@@ -12,50 +12,14 @@
 @license: GNU GPL, see COPYING for details.
 """
 
-import os, sys, shutil, errno, tarfile
+import os, sys
 
 moinpath = os.path.join(os.path.dirname(sys.argv[0]), os.pardir)
 sys.path.insert(0, os.path.abspath(moinpath))
 
 from MoinMoin import _tests
 
-WIKI = os.path.abspath('testwiki')
-SHARE = os.path.abspath('wiki')
-
-def removeTestWiki():
-    print 'removing old wiki ...'
-    for dir in ['data', 'underlay']:
-        try:
-            shutil.rmtree(os.path.join(WIKI, dir))
-        except OSError, err:
-            if err.errno != errno.ENOENT:
-                raise
-
-def copyData():
-    print 'copying data ...'
-    src = os.path.join(SHARE, 'data')
-    dst = os.path.join(WIKI, 'data')
-    shutil.copytree(src, dst)
-    # Remove arch-ids dirs
-    for path, dirs, files in os.walk(dst):
-        for dir in dirs[:]:
-            if dir == '.arch-ids':
-                shutil.rmtree(os.path.join(path, dir))
-                dirs.remove(dir)
-
-
-def untarUnderlay():
-    print 'untaring underlay ...'
-    tar = tarfile.open(os.path.join(SHARE, 'underlay.tar.bz2'), mode='r:bz2')
-    for member in tar:
-        tar.extract(member, WIKI)
-    tar.close()
-
-
 def run():   
-    removeTestWiki()
-    copyData()
-    untarUnderlay()
     _tests.run(names=sys.argv[1:])
 
 
Binary file wiki/underlay.tar.bz2 has changed