changeset 499:a65a53daed80

add old migration scripts imported from: moin--main--1.5--patch-503
author Thomas Waldmann <tw@waldmann-edv.de>
date Sun, 26 Mar 2006 09:57:07 +0000
parents fa788cd35485
children b3c18ea7db7d
files ChangeLog MoinMoin/script/old/migration/12_to_13_mig01.py MoinMoin/script/old/migration/12_to_13_mig02.py MoinMoin/script/old/migration/12_to_13_mig03.py MoinMoin/script/old/migration/12_to_13_mig04.py MoinMoin/script/old/migration/12_to_13_mig05.py MoinMoin/script/old/migration/12_to_13_mig06.py MoinMoin/script/old/migration/12_to_13_mig07.py MoinMoin/script/old/migration/12_to_13_mig08.py MoinMoin/script/old/migration/12_to_13_mig09.py MoinMoin/script/old/migration/12_to_13_mig10.py MoinMoin/script/old/migration/12_to_13_mig11.py MoinMoin/script/old/migration/152_to_1050300.py setup.py
diffstat 14 files changed, 1841 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- a/ChangeLog	Sun Mar 26 09:55:35 2006 +0000
+++ b/ChangeLog	Sun Mar 26 09:57:07 2006 +0000
@@ -2,6 +2,51 @@
 # arch-tag: automatic-ChangeLog--arch@arch.thinkmo.de--2003-archives/moin--main--1.5
 #
 
+2006-03-26 10:57:07 GMT	Thomas Waldmann <tw@waldmann-edv.de>	patch-503
+
+    Summary:
+      add old migration scripts
+    Revision:
+      moin--main--1.5--patch-503
+
+    add old migration scripts
+    
+
+    new files:
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig01.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig02.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig03.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig04.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig05.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig06.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig07.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig08.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig09.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig10.py.id
+     MoinMoin/script/old/migration/.arch-ids/12_to_13_mig11.py.id
+     MoinMoin/script/old/migration/.arch-ids/152_to_1050300.py.id
+     MoinMoin/script/old/migration/.arch-ids/=id
+     MoinMoin/script/old/migration/12_to_13_mig01.py
+     MoinMoin/script/old/migration/12_to_13_mig02.py
+     MoinMoin/script/old/migration/12_to_13_mig03.py
+     MoinMoin/script/old/migration/12_to_13_mig04.py
+     MoinMoin/script/old/migration/12_to_13_mig05.py
+     MoinMoin/script/old/migration/12_to_13_mig06.py
+     MoinMoin/script/old/migration/12_to_13_mig07.py
+     MoinMoin/script/old/migration/12_to_13_mig08.py
+     MoinMoin/script/old/migration/12_to_13_mig09.py
+     MoinMoin/script/old/migration/12_to_13_mig10.py
+     MoinMoin/script/old/migration/12_to_13_mig11.py
+     MoinMoin/script/old/migration/152_to_1050300.py
+
+    modified files:
+     ChangeLog setup.py
+
+    new directories:
+     MoinMoin/script/old/migration
+     MoinMoin/script/old/migration/.arch-ids
+
+
 2006-03-26 10:55:35 GMT	Thomas Waldmann <tw@waldmann-edv.de>	patch-502
 
     Summary:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig01.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+"""
+    12_to_13.py - migration from moin 1.2 to moin 1.3
+    * switch the wiki to utf-8 encoding
+    * switch quoting mechanism from _xx to (xx)
+    * switch timestamps from float secs to int usecs
+
+    Steps for a successful migration to utf-8:
+        1. stop your wiki and make a backup
+        2. make a copy of the wiki's "data" directory to your working dir
+        3. clean up your working copy of the data dir:
+            a. if you use CVS or GNU arch remove stuff like CVS/, .cvsignore
+               or .arch-ids/ etc.
+            b. remove *.pickle (used by moin for caching some information,
+               will be re-created automatically), especially:
+                   I. data/user/userdict.pickle
+                   II. data/dicts.pickle
+            c. if you used symlinks in data/text or elsewhere, remove them
+        4. make sure that from_encoding and to_encoding matches your needs (see
+           beginning of script below and config.charset in moin_config.py) and
+           run python2.3 12_to_13_mig1.py from your working dir
+        5. if there was no error, you will find:
+            data.pre-mig1 (the script renames your data directory copy to that name)
+            data (result, converted to utf-8)
+        6. verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed. Usually the file size gets larger when converting from
+           iso8859-1 (or other non-unicode charset) to utf-8 except if your
+           content is ASCII-only, then it will keep its size.
+        7. copy additional files from data.pre-mig1 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the cache/ directory, but let
+           the wiki recreate it.
+        8. replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+        9. test it. if something has gone wrong, you still have your backup.
+
+
+        10. if you use dictionaries for spellchecking, you have to convert them
+            to config.charset, too. Remove your dict.cache before re-starting
+            your wiki.
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+from_encoding = 'iso8859-1'
+#from_encoding = 'utf-8'
+
+to_encoding = 'utf-8'
+
+import os.path, sys, shutil, urllib
+
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, copy_dir
+
+# this is a copy of the wikiutil.unquoteFilename of moin 1.2.1
+
+def unquoteFilename12(filename, encoding):
+    """
+    Return decoded original filename when given an encoded filename.
+    
+    @param filename: encoded filename
+    @rtype: string
+    @return: decoded, original filename
+    """
+    str = urllib.unquote(filename.replace('_', '%'))
+    try:
+        newstr = str.decode(encoding)
+    except UnicodeDecodeError: # try again with iso
+        newstr = str.decode('iso-8859-1')
+    return newstr
+
+unquoteWikiname12 = unquoteFilename12
+
+
+def convert_string(str, enc_from, enc_to):
+    try:
+        newstr = str.decode(enc_from)
+    except UnicodeDecodeError: # try again with iso
+        newstr = str.decode('iso-8859-1')
+    return newstr.encode(enc_to)
+    
+def qf_convert_string(str, enc_from, enc_to):
+    str = unquoteWikiname12(str, enc_from)
+    str = wikiutil.quoteWikinameFS(str, enc_to)
+    return str
+
+def convert_file(fname_from, fname_to, enc_from, enc_to):
+    print "%s -> %s" % (fname_from, fname_to)
+    file_from = open(fname_from, "rb")
+    if os.path.exists(fname_to):
+        raise "file exists %s" % fname_to
+    file_to = open(fname_to, "wb")
+    for line in file_from:
+        file_to.write(convert_string(line, enc_from, enc_to))
+    file_to.close()
+    file_from.close()
+    st=os.stat(fname_from)
+    os.utime(fname_to, (st.st_atime,st.st_mtime))
+
+def convert_textdir(dir_from, dir_to, enc_from, enc_to, is_backupdir=0):
+    os.mkdir(dir_to)
+    for fname_from in listdir(dir_from):
+        if is_backupdir:
+            fname, timestamp = fname_from.split('.',1)
+            timestamp = str(wikiutil.timestamp2version(float(timestamp)))
+        else:
+            fname = fname_from
+        fname = qf_convert_string(fname, enc_from, enc_to)
+        if is_backupdir:
+            fname_to = '.'.join([fname, timestamp])
+        else:
+            fname_to = fname
+        convert_file(opj(dir_from, fname_from), opj( dir_to, fname_to),
+                     enc_from, enc_to)
+
+def convert_pagedir(dir_from, dir_to, enc_from, enc_to):
+    os.mkdir(dir_to)
+    for dname_from in listdir(dir_from):
+        dname_to = qf_convert_string(dname_from, enc_from, enc_to)
+        print "%s -> %s" % (dname_from, dname_to)
+        shutil.copytree(opj(dir_from, dname_from), opj(dir_to, dname_to), 1)
+        try:
+            convert_editlog(opj(dir_from, dname_from, 'last-edited'),
+                            opj(dir_to, dname_to, 'last-edited'),
+                            enc_from, enc_to)
+        except IOError:
+            pass # we ignore if it doesn't exist
+
+def convert_userdir(dir_from, dir_to, enc_from, enc_to):
+    os.mkdir(dir_to)
+    for fname in listdir(dir_from):
+        convert_file(opj(dir_from, fname), opj(dir_to, fname),
+                     enc_from, enc_to)
+
+def convert_editlog(log_from, log_to, enc_from, enc_to):
+        file_from = open(log_from)
+        file_to = open(log_to, "w")
+        for line in file_from:
+            line = line.replace('\r','')
+            line = line.replace('\n','')
+            if not line.strip(): # skip empty lines
+                continue
+            fields = line.split('\t')
+            fields[0] = qf_convert_string(fields[0], enc_from, enc_to)
+            fields[2] = str(wikiutil.timestamp2version(float(fields[2])))
+            if len(fields) < 6:
+                fields.append('') # comment
+            if len(fields) < 7:
+                fields.append('SAVE') # action
+            fields[5] = convert_string(fields[5], enc_from, enc_to)
+            line = '\t'.join(fields) + '\n'
+            file_to.write(line)
+
+origdir = 'data.pre-mig1'
+
+try:
+    os.rename('data', origdir)
+    os.mkdir('data')
+except OSError:
+    print "You need to be in the directory where your copy of the 'data' directory is located."
+    sys.exit(1)
+
+convert_textdir(opj(origdir, 'text'), opj('data', 'text'), from_encoding, to_encoding)
+
+convert_textdir(opj(origdir, 'backup'), opj('data', 'backup'), from_encoding, to_encoding, 1)
+
+convert_pagedir(opj(origdir, 'pages'), opj('data', 'pages'), from_encoding, to_encoding)
+
+convert_userdir(opj(origdir, 'user'), opj('data', 'user'), from_encoding, to_encoding)
+
+convert_editlog(opj(origdir, 'editlog'), opj('data', 'editlog'), from_encoding, to_encoding)
+
+copy_file(opj(origdir, 'event.log'), opj('data', 'event.log'))
+
+copy_dir(opj(origdir, 'plugin'), opj('data', 'plugin'))
+
+copy_file(opj(origdir, 'intermap.txt'), opj('data', 'intermap.txt'))
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig02.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+"""
+    migration from moin 1.3 < patch-78 to moin 1.3 >= patch-78
+    * switch quoting mechanism from (xx)(xx) to (xxxx)
+    * charset isn't changed, it was utf-8 before and will be utf-8 after
+    
+    Steps for a successful migration:
+        1. stop your wiki and make a backup
+        2. make a copy of the wiki's "data" directory to your working dir
+        3. run this script from your working dir
+        4. if there was no error, you will find:
+            data.pre-mig2 (the script renames your data directory copy to that name)
+            data (result, converted)
+        5. verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed.
+        6. copy additional files from data.pre-mig2 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the cache/ directory, but let
+           the wiki recreate it.
+        7. replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+        8. test it. if something has gone wrong, you still have your backup.
+        9. if you use dictionaries for spellchecking, you have to convert them
+           to config.charset, too. Remove your dict.cache before re-starting
+           your wiki.
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+from_encoding = 'utf-8'
+to_encoding = 'utf-8'
+
+import os.path, sys, shutil, urllib
+
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, copy_dir
+
+# this is a copy of the wikiutil.unquoteWikiname of moin--main--1.3--patch-77
+def unquoteWikinameOld(filename, charsets=[from_encoding,]):
+    """
+    Return decoded original filename when given an encoded filename.
+    @param filename: encoded filename
+    @rtype: string
+    @return: decoded, original filename
+    """
+    if isinstance(filename, type(u'')): # from some places we get called with unicode
+        filename = filename.encode(from_encoding)
+    fn = ''
+    i = 0
+    while i < len(filename):
+        c = filename[i]
+        if c == '(':
+            c1 = filename[i+1]
+            c2 = filename[i+2]
+            close = filename[i+3]
+            if close != ')':
+                raise Exception('filename encoding invalid')
+            i+=4
+            fn = fn + chr( 16 * int(c1,16) + int(c2, 16) )
+        else:
+            fn = fn + c
+            i+=1
+    return wikiutil.decodeUserInput(fn, charsets)
+
+
+def convert_string(str, enc_from, enc_to):
+    return str.decode(enc_from).encode(enc_to)
+
+
+def qf_convert_string(str, enc_from, enc_to):
+    """ Convert filename from pre patch 78 quoting to new quoting 
+    
+    The old quoting function from patch 77 can convert name ONLY from 
+    the old way to the new, so if you have a partially converted 
+    directory, as is the situation as of moin--main--1.3--patch-86, 
+    it does not work.
+    
+    The new unquoting function is backward compatible, and can unquote
+    both post and pre patch 78 file names.
+    """
+    str = wikiutil.unquoteWikiname(str, [enc_from])
+    str = wikiutil.quoteWikinameFS(str, enc_to)
+    return str
+
+
+def convert_file(fname_from, fname_to, enc_from, enc_to):
+    print "%s -> %s" % (fname_from, fname_to)
+    file_from = open(fname_from)
+    file_to = open(fname_to, "w")
+    for line in file_from:
+        file_to.write(convert_string(line, enc_from, enc_to))
+    file_to.close()
+    file_from.close()
+    st=os.stat(fname_from)
+    os.utime(fname_to, (st.st_atime,st.st_mtime))
+
+
+def convert_textdir(dir_from, dir_to, enc_from, enc_to, is_backupdir=0):
+    os.mkdir(dir_to)
+    for fname_from in listdir(dir_from):
+        if is_backupdir:
+            fname, timestamp = fname_from.split('.')
+        else:
+            fname = fname_from
+        fname = qf_convert_string(fname, enc_from, enc_to)
+        if is_backupdir:
+            fname_to = '.'.join([fname, timestamp])
+        else:
+            fname_to = fname
+        convert_file(opj(dir_from, fname_from), opj(dir_to, fname_to),
+                     enc_from, enc_to)
+
+
+def convert_pagedir(dir_from, dir_to, enc_from, enc_to):
+    os.mkdir(dir_to)
+    for dname_from in listdir(dir_from):
+        dname_to = qf_convert_string(dname_from, enc_from, enc_to)
+        print "%s -> %s" % (dname_from, dname_to)
+        shutil.copytree(opj(dir_from, dname_from), opj(dir_to, dname_to), 1)
+        try:
+            convert_editlog(opj(dir_from, dname_from, 'last-edited'),
+                            opj(dir_to, dname_to, 'last-edited'),
+                            enc_from, enc_to)
+        except IOError:
+            pass # we ignore if it doesn't exist
+
+def convert_userdir(dir_from, dir_to, enc_from, enc_to):
+    os.mkdir(dir_to)
+    for fname in listdir(dir_from):
+        convert_file(opj(dir_from, fname), opj(dir_to, fname),
+                     enc_from, enc_to)
+
+
+def convert_editlog(log_from, log_to, enc_from, enc_to):
+        file_from = open(log_from)
+        file_to = open(log_to, "w")
+        for line in file_from:
+            fields = line.split('\t')
+            fields[0] = qf_convert_string(fields[0], enc_from, enc_to)
+            fields[5] = convert_string(fields[5], enc_from, enc_to)
+            line = '\t'.join(fields)
+            file_to.write(line)
+
+origdir = 'data.pre-mig2'
+
+# Backup original dir and create new empty dir
+try:
+    os.rename('data', origdir)
+    os.mkdir('data')
+except OSError:
+    print "You need to be in the directory where your copy of the 'data' directory is located."
+    sys.exit(1)
+
+convert_textdir(opj(origdir, 'text'), opj('data', 'text'), from_encoding, to_encoding)
+
+convert_textdir(opj(origdir, 'backup'), opj('data', 'backup'), from_encoding, to_encoding, 1)
+
+convert_pagedir(opj(origdir, 'pages'), opj('data', 'pages'), from_encoding, to_encoding)
+
+convert_userdir(opj(origdir, 'user'), opj('data', 'user'), from_encoding, to_encoding)
+
+convert_editlog(opj(origdir, 'editlog'), opj('data', 'editlog'), from_encoding, to_encoding)
+
+copy_file(opj(origdir, 'event.log'), opj('data', 'event.log'))
+
+copy_dir(opj(origdir, 'plugin'), opj('data', 'plugin'))
+
+copy_file(opj(origdir, 'intermap.txt'), opj('data', 'intermap.txt'))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig03.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+"""
+    migration from moin 1.3 < patch-101 to moin 1.3 >= patch-101
+    We heavily change the file system layout here:
+    * data/backup/PageName.<UTC timestamp> -> data/pages/PageName/backup/<UTC timestamp>
+    * data/text/PageName -> data/pages/PageName/text
+    * data/pages/PageName/edit-lock stays the same
+    * data/pages/PageName/last-edited isn't used any more as we have the same in last line of page edit-log
+    * data/pages/PageName/attachments/* stays the same
+    * data/editlog -> stays there (as edit-log), but also gets split into data/pages/PageName/edit-log
+    * data/event.log -> stays there (as event-log)
+
+    We will use this, but don't need to convert, as it will be recreated automatically:
+    * data/cache/Page.py/PageName.<formatter> -> data/pages/PageName/cache/<formatter>
+    * data/cache/pagelinks/PageName -> data/pages/PageName/cache/pagelinks
+    * data/cache/charts/hitcounts-PageName -> data/pages/PageName/cache/hitcounts
+
+    
+    Steps for a successful migration:
+
+        1. Stop your wiki and make a backup of old data and code
+
+        2. Make a copy of the wiki's "data" directory to your working dir
+
+        3. Run this script from your working dir
+
+        4. If there was no error, you will find:
+            data.pre-mig3 - the script renames your data directory copy to that name
+            data - converted data dir
+
+        5. Verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed.
+
+        6. Copy additional files from data.pre-mig3 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the files inside the cache/ directory,
+           let the wiki refill it.
+
+        7. Replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+
+        8. Test it - if something has gone wrong, you still have your backup.
+
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+import os, sys, shutil, urllib
+
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, copy_file, copy_dir, listdir
+
+origdir = 'data.pre-mig3'
+
+def convert_textdir(dir_from, dir_to, is_backupdir=0):
+    for fname_from in listdir(dir_from):
+        if is_backupdir:
+            fname, timestamp = fname_from.split('.')
+        else:
+            fname = fname_from
+        try:
+            os.mkdir(opj(dir_to, 'pages', fname))
+        except: pass
+        try:
+            os.mkdir(opj(dir_to, 'pages', fname, 'backup'))
+        except: pass
+        try:
+            os.mkdir(opj(dir_to, 'pages', fname, 'cache'))
+        except: pass
+        if is_backupdir:
+            fname_to = opj('pages', fname, 'backup', timestamp)
+        else:
+            fname_to = opj('pages', fname, 'text')
+        copy_file(opj(dir_from, fname_from), opj(dir_to, fname_to))
+
+        #we don't have cache, mig2 doesn't convert it
+        #try:
+        #    cache_from = opj(origdir,'cache','charts','hitcounts-%s' % fname)
+        #    cache_to = opj(dir_to, 'pages', fname, 'cache', 'hitcounts')
+        #    if os.path.exists(cache_from):
+        #        copy_file(cache_from, cache_to)
+        #except: pass
+
+
+def convert_pagedir(dir_from, dir_to):
+    os.mkdir(dir_to)
+    for dname_from in listdir(dir_from):
+        print "%s" % (dname_from,)
+        dname_to = dname_from
+        shutil.copytree(opj(dir_from, dname_from), opj(dir_to, dname_to), 1)
+        try:
+            os.remove(opj(dir_to, dname_to, 'last-edited'))
+        except: pass
+
+
+def convert_editlog(file_from, file_to, dir_to):
+    for l in open(file_from):
+        data = l.split('\t')
+        pagename = data[0]
+        timestamp = data[2]
+        data[2] = str(long(float(timestamp))) # we only want integer (must be long for py 2.2.x)
+        data = '\t'.join(data)
+        
+        f = open(file_to, 'a')
+        f.write(data)
+        f.close()
+        
+        try:
+            file_to2 = opj(dir_to, pagename, 'edit-log')
+            f = open(file_to2, 'a')
+            f.write(data)
+            f.close()
+        except: pass
+
+# Backup original dir and create new empty dir
+try:
+    os.rename('data', origdir)
+    os.mkdir('data')
+except OSError:
+    print "You need to be in the directory where your copy of the 'data' directory is located."
+    sys.exit(1)
+
+convert_pagedir(opj(origdir, 'pages'), opj('data', 'pages'))
+
+convert_textdir(opj(origdir,'text'), 'data')
+
+convert_textdir(opj(origdir, 'backup'), 'data', 1)
+
+convert_editlog(opj(origdir, 'editlog'),
+                opj('data', 'edit-log'),
+                opj('data', 'pages'))
+
+copy_file(opj(origdir, 'event.log'), opj('data', 'event.log'))
+
+copy_dir(opj(origdir, 'plugin'), opj('data', 'plugin'))
+
+copy_dir(opj(origdir, 'user'), opj('data', 'user'))
+
+copy_file(opj(origdir, 'intermap.txt'), opj('data', 'intermap.txt'))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig04.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+"""
+    migration from moin 1.3 < patch-196 to moin 1.3 >= patch-196
+    Because of trouble with float timestamps, we migrate to usec timestamp resolution here.
+    * data/pages/PageName/backup/<UTC timestamp> -> .../<UTC timestamp in usecs>
+    * data/user/<uid>.bookmark -> convert to usecs
+    * data/edit-log and data/pages/PageName/edit-log -> convert to usecs
+    * data/event-log -> convert to usecs
+    
+    Steps for a successful migration:
+
+        1. Stop your wiki and make a backup of old data and code
+
+        2. Make a copy of the wiki's "data" directory to your working dir
+
+        3. Run this script from your working dir
+
+        4. If there was no error, you will find:
+            data.pre-mig4 - the script renames your data directory copy to that name
+            data - converted data dir
+
+        5. Verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed.
+
+        6. Copy additional files from data.pre-mig4 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the files inside the cache/ directory,
+           let the wiki refill it.
+
+        7. Replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+
+        8. Test it - if something has gone wrong, you still have your backup.
+
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+
+import os.path, sys, urllib
+
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, copy_dir
+
+def convert_ts(ts_from):
+    if ts_from > 5000000000: # far more than 32bits?
+        ts_to = ts_from # we already have usec kind of timestamp
+    else:
+        ts_to = wikiutil.timestamp2version(ts_from)
+    return long(ts_to) # must be long for py 2.2.x
+
+def convert_eventlog(file_from, file_to):
+    if not os.path.exists(file_from): 
+        return
+    f = open(file_to, 'a')
+    for l in open(file_from):
+        if not l.strip():
+            continue
+        data = l.split('\t')
+        data[0] = str(convert_ts(float(data[0]))) # we want usecs
+        data = '\t'.join(data)
+        f.write(data)
+    f.close()
+        
+def convert_editlog(file_from, file_to):
+    if not os.path.exists(file_from): 
+        return
+    f = open(file_to, 'a')
+    for l in open(file_from):
+        data = l.split('\t')
+        pagename = data[0]
+        timestamp = data[2]
+        data[2] = str(convert_ts(float(timestamp))) # we want usecs
+        data = '\t'.join(data)
+        f.write(data)
+    f.close()
+        
+def convert_pagedir(dir_from, dir_to, is_backupdir=0):
+    os.mkdir(dir_to)
+    for pagedir in listdir(dir_from):
+        text_from = opj(dir_from, pagedir, 'text')
+        text_to = opj(dir_to, pagedir, 'text')
+        os.mkdir(opj(dir_to, pagedir))
+        copy_file(text_from, text_to)
+        
+        backupdir_from = opj(dir_from, pagedir, 'backup')
+        backupdir_to = opj(dir_to, pagedir, 'backup')
+        if os.path.exists(backupdir_from):
+            os.mkdir(backupdir_to)
+            for ts in listdir(backupdir_from):
+                ts_usec = str(convert_ts(float(ts)))
+                backup_from = opj(backupdir_from, ts)
+                backup_to = opj(backupdir_to, ts_usec)
+                copy_file(backup_from, backup_to)
+        
+        editlog_from = opj(dir_from, pagedir, 'edit-log')
+        editlog_to = opj(dir_to, pagedir, 'edit-log')
+        convert_editlog(editlog_from, editlog_to)
+        
+        #cachedir_from = opj(dir_from, pagedir, 'cache')
+        #cachedir_to = opj(dir_to, pagedir, 'cache')
+        #if os.path.exists(cachedir_from):
+        #    os.mkdir(cachedir_to)
+        #    try:
+        #        copy_file(
+        #            opj(cachedir_from, 'hitcounts'),
+        #            opj(cachedir_to, 'hitcounts'))
+        #    except: pass
+
+        attachdir_from = opj(dir_from, pagedir, 'attachments')
+        attachdir_to = opj(dir_to, pagedir, 'attachments')
+        if os.path.exists(attachdir_from):
+            try:
+                copy_dir(attachdir_from, attachdir_to)
+            except: pass
+
+
+def convert_userdir(dir_from, dir_to):
+    os.mkdir(dir_to)
+    for fname in listdir(dir_from):
+        if fname.endswith('.bookmark'):
+            bm = open(opj(dir_from, fname)).read().strip()
+            bm = str(wikiutil.timestamp2version(float(bm)))
+            f = open(opj(dir_to, fname), 'w')
+            f.write(bm)
+            f.close()
+        else:
+            copy_file(opj(dir_from, fname), opj(dir_to, fname))
+
+
+origdir = 'data.pre-mig4'
+
+# Backup original dir and create new empty dir
+try:
+    os.rename('data', origdir)
+    os.mkdir('data')
+except OSError:
+    print "You need to be in the directory where your copy of the 'data' directory is located."
+    sys.exit(1)
+
+convert_pagedir(opj(origdir, 'pages'), opj('data', 'pages'))
+
+convert_editlog(opj(origdir, 'edit-log'), opj('data', 'edit-log'))
+
+convert_eventlog(opj(origdir, 'event.log'), opj('data', 'event-log'))
+
+convert_userdir(opj(origdir, 'user'), opj('data', 'user'))
+
+copy_dir(opj(origdir, 'plugin'), opj('data', 'plugin'))
+
+copy_file(opj(origdir, 'intermap.txt'), opj('data', 'intermap.txt'))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig05.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,331 @@
+#!/usr/bin/env python
+"""
+    migration from moin 1.3 < patch-221 to moin 1.3 >= patch-221
+    We need to make versioning completely different. Problem:
+        * old versioning used UNIX timestamps (32bits), but had collisions due
+          to seconds resolution (on the FS, they were avoided by using floats
+          in early moin versions, but floats suck and xmlrpc only does ints).
+        * then we moved to usecs resolution, collision problem solved, but
+          xmlrpc broke because it can't handle long ints. Oh well ... 8-(
+        * So for the 3rd try, we now just enumerate versions 1,2,3,4,...
+          This makes xmlrpc happy again (and matches better how xmlrpc was
+          designed, as it has separate fields for timestamp and version),
+          but we now have to keep the timestamp somewhere else. The appropriate
+          place is of course the edit-log.
+    
+    So we change like this:      
+        * data/pages/PageName/backup/<UTC timestamp in usecs>
+          -> data/pages/PageName/revisions/<revno>
+    A page save is now done like that:
+        * mv 'current' 'notcurrent'
+        * if success ('current' was there):
+            * revno = read('notcurrent')
+            * revno++
+            * write('notcurrent', revno)
+            * save to revisions/<revno>
+            * mv 'notcurrent' 'current'
+        * else give error msg and let user retry save
+            
+    * data/user/<uid>.bookmark stays in usecs
+    * data/event-log stays in usecs
+    * data/edit-log and data/pages/PageName/edit-log stay in usecs and:
+        * old: PageName UserIp TimeUSecs UserHost UserId Comment Action
+        * new: TimeUSecs PageRev Action PageName UserIp UserHost UserId Extra Comment
+        *                =======                                        =====
+         * PageRev is identical to the filename in revisions/ directory
+         * Extra is used for some stuff formerly put into comment field, like
+           revert info or attach filename
+           
+    Steps for a successful migration:
+
+        1. Stop your wiki and make a backup of old data and code
+
+        2. Make a copy of the wiki's "data" directory to your working dir
+
+        3. Run this script from your working dir
+
+        4. If there was no error, you will find:
+            data.pre-mig5 - the script renames your data directory copy to that name
+            data - converted data dir
+
+        5. Verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed.
+
+        6. Copy additional files from data.pre-mig5 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the files inside the cache/ directory,
+           let the wiki refill it.
+
+        7. Replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+
+        8. Test it - if something has gone wrong, you still have your backup.
+
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+
+import os.path, sys, urllib
+
+# Insert THIS moin dir first into sys path, or you would run another
+# version of moin!
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, move_file, copy_dir
+
+# info[pagename][timestamp_usecs] = (file_from, (...))
+# if file_from is None, we have just a log entry, but no associated file yet
+info = {}
+info2 = {}
+exists = {}
+pagelist = []
+
def gather_editlog(dir_from, el_from):
    """ this gathers everything that is in edit-log into internal
        data structures, converting to the future format

        Fills the global `info` dict:
        info[pagename][timestamp_usecs] = [None, fields] - the file part
        (None here) is matched in later by gather_pagedirs().
    """
    if not os.path.exists(el_from): 
        return
    for l in open(el_from):
        data = l.rstrip('\n').split('\t')
        origlen = len(data)
        # pad to 7 fields - old log lines may omit trailing fields
        while len(data) < 7: data.append('')
        (pagename,ip,timestamp,host,id,comment,action) = data
        if origlen == 6:
            # a 6-field line has no comment, so what we unpacked as
            # `comment` is really the action
            action = comment
            comment = ''
        
        extra = ''
        if action == 'SAVE/REVERT': # mig4 failed to convert these entries
            ts = long(comment) # must be long for py 2.2.x
            if ts < 4000000000: # UNIX timestamp (secs)
                extra = str(wikiutil.timestamp2version(ts))
            else: # usecs timestamp
                extra = str(ts)
            # later we convert this timestamp to a revision number
            comment = ''
        if action in ['ATTNEW','ATTDRW','ATTDEL',]:
            extra = comment # filename
            comment = '' # so we can use comments on ATT* in future

        timestamp = long(timestamp) # must be long for py 2.2.x
        # new field order: ts, rev (filled in later), action, pagename,
        # ip, host, id, extra, comment
        data = [timestamp,'',action,pagename,ip,host,id,extra,comment]
        
        entry = info.get(pagename, {})
        entry[timestamp] = [None, data]
        info[pagename] = entry
+        
+def gather_pagedirs(dir_from, is_backupdir=0):
+    """ this gathers information from the pagedirs, i.e. text and backup
+        files (and also the local editlog) and tries to merge/synchronize
+        with the informations gathered from editlog
+    """
+    global pagelist
+    pagelist = listdir(dir_from)
+    for pagename in pagelist:
+        editlog_from = opj(dir_from, pagename, 'edit-log')
+        gather_editlog(dir_from, editlog_from)
+         
+        entry = info.get(pagename, {})
+
+        loglist = [] # editlog timestamps of page revisions
+        for ts,data in entry.items():
+            if data[1][2] in ['SAVE','SAVENEW','SAVE/REVERT',]:
+                loglist.append(ts)
+        loglist.sort()
+        lleftover = loglist[:]
+        
+        # remember the latest log entry
+        if lleftover:
+            llatest = lleftover[-1]
+        else:
+            llatest = None
+            
+        backupdir_from = opj(dir_from, pagename, 'backup')
+        if os.path.exists(backupdir_from):
+            backuplist = listdir(backupdir_from)
+            bleftover = backuplist[:]
+            for bfile in backuplist:
+                backup_from = opj(backupdir_from, bfile)
+                ts = long(bfile)
+                if ts in loglist: # we have an editlog entry, exact match
+                    entry[ts][0] = backup_from
+                    lleftover.remove(ts)
+                    bleftover.remove(bfile)
+            
+        text_from = opj(dir_from, pagename, 'text')
+        found_text = False
+        if os.path.exists(text_from): # we have a text file, it should match latest log entry
+            exists[pagename] = True
+            mtime = os.path.getmtime(text_from)
+            if llatest and llatest in lleftover:
+                ts = llatest
+                if abs(wikiutil.timestamp2version(mtime) - ts) < 2000000: # less than a second diff
+                    entry[ts][0] = text_from
+                    lleftover.remove(ts)
+                    found_text = True
+            else: # we have no log entries left 8(
+                ts = wikiutil.timestamp2version(mtime)
+                data = [ts,'','SAVE', pagename,'','','','','missing editlog entry for this page version']
+                entry[ts] = [text_from, data]
+        else:
+            # this page was maybe deleted, so we remember for later:
+            exists[pagename] = False
+            if llatest in lleftover: # if a page is deleted, the last log entry has no file
+                entry[llatest][0] = None
+                lleftover.remove(llatest)
+                        
+        if os.path.exists(backupdir_from):
+            backuplist = listdir(backupdir_from)
+            for bfile in backuplist:
+                if not bfile in bleftover: continue
+                backup_from = opj(backupdir_from, bfile)
+                bts = long(bfile) # must be long for py 2.2.x
+                for ts in lleftover:
+                    tdiff = abs(bts-ts)
+                    if tdiff < 2000000: # editlog, inexact match
+                        entry[ts][0] = backup_from
+                        lleftover.remove(ts)
+                        bleftover.remove(bfile)
+                    elif 3599000000 <= tdiff <= 3601000000: # editlog, win32 daylight saving bug
+                        entry[ts][0] = backup_from
+                        lleftover.remove(ts)
+                        bleftover.remove(bfile)
+                        print "Warning: Win32 daylight saving bug encountered & fixed!"
+                        
+            if len(bleftover) == 1 and len(lleftover) == 1: # only 1 left, must be this
+                backup_from = opj(backupdir_from, bleftover[0])
+                entry[lleftover[0]][0] = backup_from
+                lleftover = []
+                bleftover = []
+            
+            # fake some log entries
+            for bfile in bleftover:
+                backup_from = opj(backupdir_from, bfile)
+                bts = long(bfile) # must be long py 2.2.x
+                data = [ts,'','SAVE',pagename,'','','','','missing editlog entry for this page version']
+                entry[bts] = [backup_from, data]
+                
+        # check if we still haven't matched the "text" file
+        if not found_text and os.path.exists(text_from):
+            if llatest in lleftover: # latest log entry still free
+                entry[llatest][0] = text_from # take it. do not care about mtime of file.
+                lleftover.remove(llatest)
+            else: # log for "text" file is missing or latest was taken by other rev 8(
+                mtime = os.path.getmtime(text_from)
+                ts = wikiutil.timestamp2version(mtime) # take mtime, we have nothing better
+                data = [ts,'','SAVE', pagename,'','','','','missing editlog entry for this page version']
+                entry[ts] = [text_from, data]
+                
+        # delete unmatching log entries
+        for ts in lleftover:
+            #print "XXX Deleting leftover log entry: %r" % entry[ts]
+            del entry[ts]
+        
+        info[pagename] = entry
+
def remove_trash(dir_from):
    """ Copy the entries for "real" pages from `info` into `info2`,
        dropping dead pages (neither text nor backup file on disk) and
        editor auto-backup pages (*MoinEditorBackup).
    """
    for pagename in info:
        if pagename not in pagelist:
            continue # no page dir seen for this log entry
        if pagename.endswith('MoinEditorBackup'):
            continue # editor auto-backup, do not migrate
        has_text = os.path.exists(opj(dir_from, pagename, 'text'))
        has_backup = os.path.exists(opj(dir_from, pagename, 'backup'))
        if has_text or has_backup:
            info2[pagename] = info[pagename]
+
+def generate_pages(dir_from, dir_to):
+    for pagename in info2:
+        entry = info2.get(pagename, {})
+        tslist = entry.keys()
+        if tslist:
+            pagedir = opj(dir_to, 'pages', pagename)
+            os.makedirs(opj(pagedir, 'revisions'))
+            editlog_file = opj(pagedir, 'edit-log')
+            f = open(editlog_file, 'w')
+            rev = 0
+            tslist.sort()
+            for ts in tslist:
+                rev += 1
+                revstr = '%08d' % rev
+                file_from, data = entry[ts]
+                data[0] = str(ts)
+                data[1] = revstr
+                if data[2].endswith('/REVERT'):
+                    # replace the timestamp with the revision number
+                    revertts = long(data[7]) # must be long for py 2.2.x
+                    try:
+                        revertrev = int(entry[revertts][1][1])
+                    except KeyError:
+                        # never should trigger...
+                        print "********* KeyError %s entry[%d][1][1] **********" % (pagename, revertts)
+                        revertrev = 0
+                    data[7] = '%08d' % revertrev
+                f.write('\t'.join(data)+'\n')
+                if file_from is not None:
+                    file_to = opj(pagedir, 'revisions', revstr)
+                    copy_file(file_from, file_to)
+            f.close()
+                
+            curr_file = opj(pagedir, 'current')
+            f = open(curr_file, 'w')
+            f.write(revstr)
+            f.close()
+
+        att_from = opj(dir_from, 'pages', pagename, 'attachments')
+        if os.path.exists(att_from):
+            att_to = opj(pagedir, 'attachments')
+            copy_dir(att_from, att_to)
+        
+
def generate_editlog(dir_from, dir_to):
    """ Merge the per-page log data of all pages in `info2` into one
        global edit-log file, ordered by timestamp.
    """
    merged = {}
    for pagename in info2:
        for ts, (unused_file, fields) in info2.get(pagename, {}).items():
            merged[ts] = fields

    timestamps = merged.keys()
    timestamps.sort()

    log = open(opj(dir_to, 'edit-log'), 'w')
    for ts in timestamps:
        log.write('\t'.join(merged[ts]) + '\n')
    log.close()
+
+        
origdir = 'data.pre-mig5'

# Backup original dir and create new empty dir
try:
    os.rename('data', origdir)
    os.mkdir('data')
except OSError:
    print "You need to be in the directory where your copy of the 'data' directory is located."
    sys.exit(1)

# gather data from the global edit-log and the page dirs ...
gather_editlog(origdir, opj(origdir, 'edit-log'))
gather_pagedirs(opj(origdir, 'pages'))

# ... drop dead pages and editor backups ...
remove_trash(opj(origdir, 'pages'))

# ... then write the renumbered page dirs and the merged global edit-log
generate_pages(origdir, 'data')
generate_editlog(origdir, 'data')


# everything below needs no conversion, copy unchanged
copy_dir(opj(origdir, 'plugin'), opj('data', 'plugin'))

copy_dir(opj(origdir, 'user'), opj('data', 'user'))

copy_file(opj(origdir, 'event-log'), opj('data', 'event-log'))

copy_file(opj(origdir, 'intermap.txt'), opj('data', 'intermap.txt'))
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig06.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+"""
+    12_to_13.py - migration from < moin--main--patch-248 to >= patch 249
+    * convert event-log from iso8859-1 to config.charset (utf-8) encoding
+
+    Steps for a successful migration to utf-8:
+        1. stop your wiki and make a backup
+        2. make a copy of the wiki's "data" directory to your working dir
+        3. clean up your working copy of the data dir:
+            a. if you use CVS or GNU arch remove stuff like CVS/, .cvsignore
+               or .arch-ids/ etc.
+            b. remove *.pickle (used by moin for caching some information,
+               will be re-created automatically), especially:
+                   I. data/user/userdict.pickle
+                   II. data/dicts.pickle
+            c. if you used symlinks in data/text or elsewhere, remove them
+        4. make sure that from_encoding and to_encoding matches your needs (see
+           beginning of script below and config.charset in moin_config.py) and
+           run python2.3 12_to_13_mig06.py from your working dir
+        5. if there was no error, you will find:
+            data.pre-mig6 (the script renames your data directory copy to that name)
+            data (result, converted to utf-8)
+        6. verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed. Usually the file size gets larger when converting from
+           iso8859-1 (or other non-unicode charset) to utf-8 except if your
+           content is ASCII-only, then it will keep its size.
+        7. copy additional files from data.pre-mig6 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the cache/ directory, but let
+           the wiki recreate it.
+        8. replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+        9. test it. if something has gone wrong, you still have your backup.
+
+
+        10. if you use dictionaries for spellchecking, you have to convert them
+            to config.charset, too. Remove your dict.cache before re-starting
+            your wiki.
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+from_encoding = 'iso8859-1'
+to_encoding = 'utf-8'
+
+import os.path, sys, shutil, urllib
+
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, copy_dir
+
def convert_string(str, enc_from, enc_to):
    """ Re-encode a byte string from encoding `enc_from` to `enc_to`. """
    unicode_value = str.decode(enc_from)
    return unicode_value.encode(enc_to)
+
+def convert_eventlog(fname_from, fname_to, enc_from, enc_to):
+    print "%s -> %s" % (fname_from, fname_to)
+    file_from = open(fname_from)
+    file_to = open(fname_to, "w")
+        
+    for line in file_from:
+        line = line.replace('\r','')
+        line = line.replace('\n','')
+        fields = line.split('\t')
+        kvpairs = fields[2]
+        kvpairs = kvpairs.split('&')
+        kvlist = []
+        for kvpair in kvpairs:
+            key, val = kvpair.split('=')
+            key = urllib.unquote(key)
+            val = urllib.unquote(val)
+            key = convert_string(key, enc_from, enc_to)
+            val = convert_string(val, enc_from, enc_to)
+            key = urllib.quote(key)
+            val = urllib.quote(val)
+            kvlist.append("%s=%s" % (key,val))
+        fields[2] = '&'.join(kvlist)
+        line = '\t'.join(fields) + '\n'
+        file_to.write(line)
+
+    file_to.close()
+    file_from.close()
+    st=os.stat(fname_from)
+    os.utime(fname_to, (st.st_atime,st.st_mtime))
+
origdir = 'data.pre-mig6'

# Backup the original data dir; the converted copy is built in 'data'
try:
    os.rename('data', origdir)
except OSError:
    print "You need to be in the directory where your copy of the 'data' directory is located."
    sys.exit(1)

# copy everything, then replace the event-log with a re-encoded version
copy_dir(origdir, 'data')
os.remove(opj('data','event-log')) # old format
convert_eventlog(opj(origdir, 'event-log'), opj('data', 'event-log'), from_encoding, to_encoding)
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig07.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+"""
+    12_to_13.py - converting CRLF / LF style to the future standard
+    Use this to convert from 1.3 pre patch-275 to patch-275.
+    
+    Changes:
+    * use OS style for logs (== no change, same as it was)
+    * use CRLF for page files on any platform (text/* mandates it!) -
+      and we will use that MIME type soon.
+    * use LF only internally in moin, convert from/to CRLF early/late
+      where needed
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+import os.path, sys, urllib
+
+# Insert THIS moin dir first into sys path, or you would run another
+# version of moin!
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, move_file, copy_dir
+
def tocrlf(fni, fno):
    """ rewrite a text file using CRLF for line endings, no matter what
        it was before; a trailing CRLF is guaranteed.  The mtime/atime
        of the source file are copied to the destination.
    """
    f = open(fni, "rb")
    raw = f.read()
    f.close()
    # drop all CRs, then expand every LF to CRLF
    text = raw.replace("\r", "").replace("\n", "\r\n")
    if not text.endswith("\r\n"):
        text = text + "\r\n"
    out = open(fno, "wb")
    out.write(text)
    out.close()
    st = os.stat(fni)
    os.utime(fno, (st.st_atime, st.st_mtime))
+                
def process_pagedirs(dir_from, dir_to):
    """ Copy every page dir, then rewrite each file in its revisions/
        subdir with CRLF line endings (overwriting the plain copies).
    """
    for pagename in listdir(dir_from):
        pagedir_from = opj(dir_from, pagename)
        pagedir_to = opj(dir_to, pagename)

        # first copy everything, even the files we convert below:
        copy_dir(pagedir_from, pagedir_to)

        rev_from = opj(pagedir_from, 'revisions')
        rev_to = opj(pagedir_to, 'revisions')
        if not os.path.exists(rev_from):
            continue
        for rfile in listdir(rev_from):
            rev = int(rfile) # sanity check: revision file names are numeric
            tocrlf(opj(rev_from, rfile), opj(rev_to, rfile))
+
origdir = 'data.pre-mig7'

# Backup the original data dir; the converted copy is built in 'data'
try:
    os.rename('data', origdir)
except OSError:
    print "You need to be in the directory where your copy of the 'data' directory is located."
    sys.exit(1)

os.makedirs(opj('data','pages'))

# copy all pages, rewriting the revision files with CRLF line endings
process_pagedirs(opj(origdir, 'pages'), opj('data', 'pages'))

# everything else is copied unchanged (logs keep their OS line endings)
copy_dir(opj(origdir, 'plugin'), opj('data', 'plugin'))

copy_dir(opj(origdir, 'user'), opj('data', 'user'))

copy_file(opj(origdir, 'edit-log'), opj('data', 'edit-log'))
copy_file(opj(origdir, 'event-log'), opj('data', 'event-log'))

copy_file(opj(origdir, 'intermap.txt'), opj('data', 'intermap.txt'))
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig08.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+"""
+    migration from moin 1.3 < patch-305 to moin 1.3 >= patch-305
+    Here we fix some errors that crept in by use of mig1(?) and mig5:
+    * the edit-log misses 1 field (missing TAB) on faked "missing editlog
+      entry" entries
+    * we accidentally gave ATTNEW/DRW/DEL an incremented revno (although
+      attaching a file doesn't change page content and revision), so we need
+      to convert those entries to use revno == 99999999 and renumber the
+      normal entries so we have no missing numbers in between
+    * edit-log's action field sometimes was empty (default: SAVE)
+    
+    Steps for a successful migration:
+
+        1. Stop your wiki and make a backup of old data and code
+
+        2. Make a copy of the wiki's "data" directory to your working dir
+
+        3. Run this script from your working dir
+
+        4. If there was no error, you will find:
+            data.pre-mig8 - the script renames your data directory copy to that name
+            data - converted data dir
+
+        5. Verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed.
+
+        6. Copy additional files from data.pre-mig8 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the files inside the cache/ directory,
+           let the wiki refill it.
+
+        7. Replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+
+        8. Test it - if something has gone wrong, you still have your backup.
+
+
+    @copyright: 2004 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+
+import os.path, sys, urllib
+
+# Insert THIS moin dir first into sys path, or you would run another
+# version of moin!
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, move_file, copy_dir
+
+# info[pagename][timestamp_usecs] = [revno_new, [...]]
+# if revno_new is 99999999, we haven't assigned a new revno to this entry
+info = {}
+
def gather_editlog(el_from, forcepagename=None):
    """ this gathers everything that is in edit-log into internal
        data structures, converting to the future format

        @param el_from: path of the edit-log file to read
        @param forcepagename: if set, override the pagename found in the
            log lines - used for per-page edit-logs, where the page dir
            name is authoritative (handles renamed pages)
    """
    if not os.path.exists(el_from): 
        return
    for l in open(el_from):
        data = l.rstrip('\n').rstrip('\r').split('\t')
        # pad to the full 9 fields - the fake entries written by earlier
        # migrations omitted one field (see module docstring)
        while len(data) < 9:
            data.append('')
        (timestampstr,revstr,action,pagename,ip,host,id,extra,comment) = data
        
        if forcepagename: # we use this for edit-log in pagedirs (for renamed pages!)
            pagename = forcepagename

        if not action: # FIX: sometimes action is empty ...
            action = 'SAVE'

        if action in ['ATTNEW','ATTDRW','ATTDEL',]:
            revstr = '99999999' # FIXES revno
            # use reserved value, ATT action doesn't create new rev of anything

        if (comment == '' and extra == '' and id == 'missing editlog entry for this page version') or \
           (extra == '' and id == '' and comment == 'missing editlog entry for this page version'):
            # FIX omitted field bug on fake entries
            comment = 'missing edit-log entry for this revision' # more precise
            extra = ''
            id = ''
            
        rev = int(revstr)
        data = [timestampstr,rev,action,pagename,ip,host,id,extra,comment]
        
        entry = info.get(pagename, {})
        timestamp = long(timestampstr) # must be long for py 2.2.x
        entry[timestamp] = [99999999, data] # new revno (unassigned yet), data
        info[pagename] = entry
+        
def gather_pagedirs(dir_from):
    """ Collect edit-log data from every per-page edit-log below
        <dir_from>/pages; the page dir name is forced as the page name,
        so entries of renamed pages end up under the right page.
    """
    pages_root = opj(dir_from, 'pages')
    for name in listdir(pages_root):
        gather_editlog(opj(pages_root, name, 'edit-log'), name)
+
+
def generate_pages(dir_from, dir_to):
    """ Write the renumbered page dirs: revision files get consecutive
        new revnos, ATT* log entries keep the reserved revno 99999999,
        REVERT extra fields are remapped to the new revnos, and the
        'current' file is only rewritten for pages that still exist.
    """
    revactions = ['SAVE','SAVENEW','SAVE/REVERT',] # these actions create revisions
    for pn in info:
        entry = info.get(pn, {})
        tslist = entry.keys()
        # BUGFIX: assign pagedir before the `if tslist:` branch - the old
        # code left it unbound for pages with attachments but no log entries
        pagedir = opj(dir_to, 'pages', pn)
        if tslist:
            revdir = opj(pagedir, 'revisions')
            os.makedirs(revdir)
            editlog_file = opj(pagedir, 'edit-log')
            f = open(editlog_file, 'w')
            revnew = 0
            tslist.sort()
            for ts in tslist:
                data = entry[ts][1]
                datanew = data[:]
                (timestamp,rev,action,pagename,ip,host,id,extra,comment) = data
                revstr = '%08d' % rev
                if action in revactions:
                    revnew += 1
                    revnewstr = '%08d' % revnew
                    entry[ts][0] = revnew # remember what new revno we chose
                else: # ATTNEW,ATTDRW,ATTDEL
                    revnewstr = '99999999' # reserved: no page rev created
                if action.endswith('/REVERT'):
                    # replace the old revno with the correct new revno
                    revertrevold = int(extra)
                    revertrevnew = 0
                    for ts2 in tslist:
                        data2 = entry[ts2][1]
                        (timestamp2,rev2,action2,pagename2,ip2,host2,id2,extra2,comment2) = data2
                        if rev2 == revertrevold:
                            revertrevnew = entry[ts2][0]
                    datanew[7] = '%08d' % revertrevnew
                    
                datanew[1] = revnewstr
                f.write('\t'.join(datanew)+'\n') # does make a CRLF on win32 in the file
                
                if action in revactions: # we DO have a page rev for this one
                    file_from = opj(dir_from, 'pages', pn, 'revisions', revstr)
                    file_to = opj(revdir, revnewstr)
                    copy_file(file_from, file_to)
            f.close()
            
            # check if page exists or is deleted in orig dir
            pagedir_from = opj(dir_from, 'pages', pn)
            curr_file_from = opj(pagedir_from, 'current')
            try:
                open(curr_file_from).read() # just probe whether it is readable
                page_exists = 1
            except (IOError, OSError): # BUGFIX: was a bare except
                page_exists = 0
                
            # re-make correct DELETED status!
            if page_exists:
                curr_file = opj(pagedir, 'current')
                f = open(curr_file, 'w')
                f.write("%08d\n" % revnew) # we add a \n, so it is easier to hack in there manually
                f.close()

        att_from = opj(dir_from, 'pages', pn, 'attachments')
        if os.path.exists(att_from):
            att_to = opj(pagedir, 'attachments')
            copy_dir(att_from, att_to)
+        
+
def generate_editlog(dir_from, dir_to):
    """ Merge all per-page log data from `info` into one global
        edit-log, sorted by timestamp; the numeric rev field is
        formatted as an 8-digit zero-padded string on the way out.
    """
    merged = {}
    for pagename in info:
        for ts, (newrev, fields) in info.get(pagename, {}).items():
            merged[ts] = fields

    timestamps = merged.keys()
    timestamps.sort()

    log = open(opj(dir_to, 'edit-log'), 'w')
    for ts in timestamps:
        fields = merged[ts][:] # copy, we replace the rev field
        fields[1] = '%08d' % fields[1]
        log.write('\t'.join(fields) + '\n')
    log.close()
+
+        
origdir = 'data.pre-mig8'

# Backup original dir and create new empty dir
try:
    os.rename('data', origdir)
    os.mkdir('data')
except OSError:
    print "You need to be in the directory where your copy of the 'data' directory is located."
    sys.exit(1)

# the global edit-log is not read directly - it is rebuilt from the
# per-page edit-logs gathered below:
#gather_editlog(opj(origdir, 'edit-log'))
gather_pagedirs(origdir)

# write the fixed global edit-log and the renumbered page dirs
generate_editlog(origdir, 'data')
generate_pages(origdir, 'data')

# everything below needs no conversion, copy unchanged
copy_dir(opj(origdir, 'plugin'), opj('data', 'plugin'))

copy_dir(opj(origdir, 'user'), opj('data', 'user'))

copy_file(opj(origdir, 'event-log'), opj('data', 'event-log'))

copy_file(opj(origdir, 'intermap.txt'), opj('data', 'intermap.txt'))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig09.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+"""
+Migration from moin--main--1.3 pre patch-332 to post patch-332.
+
+In patch-332 we changed the format of page lists in user data file. They
+are now tab separated instead of comma separated, and page names are not
+quoted using file system quoting.
+
+You can run the script multiple times with no damage.
+
+
+Steps for a successful migration:
+
+ 1. Stop your wiki
+
+ 2. Make a backup of your wiki 'data' directory
+
+    WARNING: THIS SCRIPT MIGHT CORRUPT YOUR 'DATA' DIRECTORY. DON'T
+    COMPLAIN LATER, MAKE BACKUP NOW!
+
+ 3. Move the wiki's 'data' directory to your working dir
+
+ 4. Run this script from your working dir
+
+ 5. If there was no error, you will find:
+    data.pre-mig9   - backup of original data directory
+    data            - converted data dir
+
+ 6. Verify conversion results (number of pages, size of logs,
+    attachments, number of backup copies) - everything should be
+    reasonable before you proceed.
+
+    NOTE: THE CACHE DIRECTORY IS NOT COPIED - DO NOT COPY IT, IT WILL BE
+    CREATED AND FILLED BY THE WIKI AUTOMATICALLY.
+
+ 7. Move the converted data directory into your wiki. Do not simply copy
+    the converted stuff into the original or you will duplicate pages
+    and create chaos!
+
+ 8. Fix permissions on your data directory, see HelpOnInstalling.
+
+ 9. Test it - if something has gone wrong, you still have your backup.
+
+
+@copyright: 2004 Thomas Waldmann
+@license: GPL, see COPYING for details
+"""
+
+import os, sys, codecs
+join = os.path.join
+
+# Insert THIS moin dir first into sys path, or you might run another
+# version of moin and get unpredictable results!
+sys.path.insert(0, '../../../..')
+
+from MoinMoin import wikiutil, user
+from MoinMoin.script.migration import migutil
+
+
+def convert_quicklinks(string):
+    """ Convert quicklinks from pre patch-332 to new format """
+    # No need to convert new style list (already tab separated)
+    if '\t' in string:
+        return string
+        
+    # Old format: comma separated, file-system-quoted wiki names.
+    # Unquote each name, drop empties, and re-encode as a tab separated list.
+    names = [name.strip() for name in string.split(',')]
+    names = [wikiutil.unquoteWikiname(name) for name in names if name != '']
+    string = user.encodeList(names)
+    return string
+
+
+def convert_subscribed_pages(string):
+    """ Convert subscribed pages from pre patch-332 to new format """
+    # No need to convert new style list (already tab separated)
+    if '\t' in string:
+        return string
+
+    # This might break pages that contain ',' in the name, we can't do
+    # anything about it. This was the reason we changed the format.
+    # Unlike quicklinks, subscribed page names are NOT filesystem-quoted,
+    # so no unquoting happens here - only splitting and re-encoding.
+    names = [name.strip() for name in string.split(',')]
+    string = user.encodeList(names)
+    return string
+
+    
+def convertUserData(text):
+    """ Convert user data
+
+    @param text: text of user file, unicode
+    @rtype: unicode
+    @return: converted user data
+    """
+    lines = text.splitlines()
+    for i in range(len(lines)):
+        line = lines[i]
+        try:
+            key, value = line.split('=', 1)
+        except ValueError:
+            # not a key=value line - leave it untouched
+            continue
+        # Only the two page-list keys changed format in patch-332.
+        if key == u'quicklinks':
+            value = convert_quicklinks(value)
+        elif key == u'subscribed_pages':
+            value = convert_subscribed_pages(value)
+        lines[i] = u'%s=%s' % (key, value)
+
+    # Join back, append newline to last line
+    text = u'\n'.join(lines) + u'\n'
+    return text
+        
+
+def convertUsers(srcdir, dstdir):
+    """ Convert users files
+
+    @param srcdir: old users dir
+    @param dstdir: new users dir
+    """
+    charset = 'utf-8' # user files are read/written UTF-8 encoded
+    
+    # Create dstdir
+    if not os.path.exists(dstdir):
+        try:
+            os.mkdir(dstdir)
+        except OSError:
+            migutil.fatalError("can't create user directory at '%s'" % dstdir)
+
+    if not os.path.isdir(srcdir):
+        migutil.fatalError("can't find user directory at '%s'" % srcdir)
+
+    for name in migutil.listdir(srcdir):
+        if name == 'README' or name.endswith('.trail'):
+            # Copy as is - README and trail files hold no page lists
+            migutil.copy_file(join(srcdir, name), join(dstdir, name))
+        else:
+            # Regular user file: decode, convert the page lists, re-encode.
+            srcfile = join(srcdir, name)
+            f = codecs.open(srcfile, 'rb', charset)
+            text = f.read()
+            f.close()
+            text = convertUserData(text)
+            dstfile = join(dstdir, name)
+            f = codecs.open(dstfile, 'wb', charset)
+            f.write(text)
+            f.close()
+            print "Converted '%s' to '%s'" % (srcfile, dstfile)
+
+
+if __name__ == '__main__':
+          
+    # Backup original dir
+    datadir = 'data'
+    origdir = 'data.pre-mig9'
+    migutil.backup(datadir, origdir)
+
+    # Copy ALL stuff from original dir into new data dir. Don't change
+    # or drop anything from the original directory except cache files.
+    names = ['edit-log', 'event-log', 'intermap.txt', 'pages', 'plugin']
+    migutil.copy(names, origdir, datadir)
+
+    # Convert user directory (the only part whose contents change)
+    convertUsers(join(origdir, 'user'), join(datadir, 'user'))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig10.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+"""
+    migration from moin 1.3 < patch-xxx to moin 1.3 >= patch-xxx
+    We fix 2 issues here:
+    * we forgot to handle edit-lock files. We simply delete them now.
+    * we convert attachment names to utf-8
+    
+    Steps for a successful migration:
+
+        1. Stop your wiki and make a backup of old data and code
+
+        2. Make a copy of the wiki's "data" directory to your working dir
+
+        3. make sure that from_encoding and to_encoding matches your needs (see
+           beginning of script below and config.charset in moin_config.py) and
+           run python2.3 12_to_13_mig10.py from your working dir
+        
+        4. If there was no error, you will find:
+            data.pre-mig10 - the script renames your data directory copy to that name
+            data - converted data dir
+
+        5. Verify conversion results (number of pages, size of logs, attachments,
+           number of backup copies) - everything should be reasonable before
+           you proceed.
+
+        6. Copy additional files from data.pre-mig10 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the files inside the cache/ directory,
+           let the wiki refill it.
+
+        7. Replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+
+        8. Test it - if something has gone wrong, you still have your backup.
+
+
+    @copyright: 2005 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+from_encoding = 'iso8859-1'
+#from_encoding = 'utf-8'
+
+to_encoding = 'utf-8'
+
+import os, os.path, sys, urllib
+
+# Insert THIS moin dir first into sys path, or you would run another
+# version of moin!
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, move_file, copy_dir
+
+def migrate(dir_to):
+    """ this removes edit-lock files from the pagedirs and
+        converts attachment filenames
+
+        Attachment names that do not decode as to_encoding are assumed to be
+        in from_encoding (module-level settings) and are renamed in place.
+    """
+    pagesdir = opj(dir_to, 'pages')
+    pagelist = listdir(pagesdir)
+    for pagename in pagelist:
+        pagedir = opj(pagesdir, pagename)
+        editlock = opj(pagedir, 'edit-lock')
+        try:
+            os.remove(editlock)
+        except:
+            # NOTE(review): bare except used as best-effort removal (the
+            # lock file may not exist); narrowing to OSError would be cleaner.
+            pass
+
+        # os.walk yields nothing if the attachments dir does not exist,
+        # so no existence check is needed here.
+        attachdir = os.path.join(pagedir, 'attachments')
+        for root, dirs, files in os.walk(attachdir):
+            for f in  files:
+                try:
+                    f.decode(to_encoding)
+                except UnicodeDecodeError:
+                    # not valid in target charset -> transcode the name
+                    fnew = f.decode(from_encoding).encode(to_encoding)
+                    os.rename(os.path.join(root,f), os.path.join(root, fnew))
+                    print 'renamed', f, '\n ->', fnew, ' in dir:', root
+
+        
+origdir = 'data.pre-mig10'
+destdir = 'data'
+
+# Backup original dir and create new empty dir
+try:
+    os.rename(destdir, origdir)
+except OSError:
+    # rename failed -> most likely run from the wrong directory
+    print "You need to be in the directory where your copy of the 'data' directory is located."
+    sys.exit(1)
+
+# Work on a full copy of the data, then fix it in place.
+copy_dir(origdir, destdir)
+migrate(destdir)
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/12_to_13_mig11.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+"""
+    migration from moin 1.2 to moin 1.3
+    For 1.3, the plugin module loader needs some __init__.py files.
+    Although we supply those files in the new "empty wiki template" in
+    wiki/data, many people forgot to update their plugin directories,
+    so we do that via this mig script now.
+    
+    Steps for a successful migration:
+
+        1. Stop your wiki and make a backup of old data and code
+
+        2. Make a copy of the wiki's "data" directory to your working dir
+
+        3. If there was no error, you will find:
+            data.pre-mig11 - the script renames your data directory copy to that name
+            data - converted data dir
+
+        4. Copy additional files from data.pre-mig11 to data (maybe intermaps, logs,
+           etc.). Be aware that the file contents AND file names of wiki content
+           may have changed, so DO NOT copy the files inside the cache/ directory,
+           let the wiki refill it.
+
+        5. Replace the data directory your wiki uses with the data directory
+           you created by previous steps. DO NOT simply copy the converted stuff
+           into the original or you will duplicate pages and create chaos!
+
+        6. Test it - if something has gone wrong, you still have your backup.
+
+
+    @copyright: 2005 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+
+import os.path, sys, urllib
+
+# Insert THIS moin dir first into sys path, or you would run another
+# version of moin!
+sys.path.insert(0, '../../../..')
+from MoinMoin import wikiutil
+
+from MoinMoin.script.migration.migutil import opj, listdir, copy_file, move_file, copy_dir, makedir
+
+def migrate(destdir):
+    """ Create the plugin package directories and the __init__.py files
+        the moin 1.3 plugin module loader needs.
+    """
+    plugindir = opj(destdir, 'plugin')
+    makedir(plugindir)
+    # Top-level plugin package marker (intentionally without module loader code)
+    fname = opj(plugindir, '__init__.py')
+    f = open(fname, 'w')
+    f.write('''\
+# *** Do not remove this! ***
+# Although being empty, the presence of this file is important for plugins
+# working correctly.
+''')
+    f.close()
+    # One sub-package per plugin type, each with a loader __init__.py:
+    for d in ['action', 'formatter', 'macro', 'parser', 'processor', 'theme', 'xmlrpc', ]:
+        thisdir = opj(plugindir, d)
+        makedir(thisdir)
+        fname = opj(thisdir, '__init__.py')
+        f = open(fname, 'w')
+        f.write('''\
+# -*- coding: iso-8859-1 -*-
+
+from MoinMoin.util import pysupport
+
+modules = pysupport.getPackageModules(__file__)
+''')
+        f.close()
+
+origdir = 'data.pre-mig11'
+destdir = 'data'
+
+# Backup original dir and create new empty dir
+try:
+    os.rename(destdir, origdir)
+except OSError:
+    # rename failed -> most likely run from the wrong directory
+    print "You need to be in the directory where your copy of the 'data' directory is located."
+    sys.exit(1)
+
+# Work on a full copy; migrate() only adds files, nothing is converted.
+copy_dir(origdir, destdir)
+migrate(destdir)
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/MoinMoin/script/old/migration/152_to_1050300.py	Sun Mar 26 09:57:07 2006 +0000
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+"""
+    Migration from moin 1.5.2 to moin 1.5.3
+    
+    We just make sure that there is a "meta" file in your data directory that
+    stores the "revision" level of it (so future mig scripts can tell if they
+    need to run or not [e.g. because you already have run them]).
+    
+    This is the last "old style" migration script.
+
+    Steps for a successful migration:
+
+        1. You do NOT need to stop your wiki for this mig script.
+
+        2. Change directory to: .../MoinMoin/script/old/migration
+        
+        3. Run this script as a user who may write to the data_dir of your wiki
+           and supply the paths to the data_dir you want to migrate. If you
+           have multiple wikis, you may specify multiple paths on the command
+           line:
+
+           sudo -u www-data ./152_to_1050300.py /my/path/to/data
+                                 
+        4. That's it.
+           Future mig scripts now can auto-detect the data_dir revision level.
+
+    @copyright: 2006 Thomas Waldmann
+    @license: GPL, see COPYING for details
+"""
+import sys, os
+
+# Insert THIS moin dir first into sys path, or you would run another
+# version of moin!
+sys.path.insert(0, '../../../..')
+
+def migrate(dirlist):
+    """ Write a 'meta' file carrying the data_format_revision into every
+        data_dir in dirlist, so future migration scripts can auto-detect
+        the revision level.  Prints a success/warning/error summary.
+    """
+    errors = warnings = success = 0
+    for dir in dirlist:
+        # Sanity check: a real data_dir has 'pages' and 'user' subdirs.
+        if not (os.path.exists(os.path.join(dir, 'pages')) and
+                os.path.exists(os.path.join(dir, 'user'))):
+            print "Error: Skipping %s - does not look like a data_dir" % dir
+            errors += 1
+        else:
+            fname = os.path.join(dir, 'meta')
+            if os.path.exists(fname):
+                # Script already ran for this dir - never overwrite.
+                print "Warning: There already is a meta file there, skipping %s" % dir
+                warnings += 1
+            else:
+                try:
+                    f = open(fname, 'w')
+                    f.write("data_format_revision: 01050300\n") # 01050300 = 1.5.3(.0)
+                    f.close()
+                    success += 1
+                except:
+                    # NOTE(review): bare except - typically an IOError/OSError
+                    # (permissions); narrowing it would be cleaner.
+                    errors += 1
+                    print "Error: Exception when migrating %s" % dir
+    print "%d data_dirs successfully migrated, %d warnings, %d errors." % (success, warnings, errors)
+
+if __name__ == '__main__':
+    # Default to './data' when no data_dir paths are given on the command line.
+    if len(sys.argv) < 2:
+        data_dirs = ['data',]
+    else:
+        data_dirs = sys.argv[1:]
+    migrate(data_dirs)
+
--- a/setup.py	Sun Mar 26 09:55:35 2006 +0000
+++ b/setup.py	Sun Mar 26 09:57:07 2006 +0000
@@ -211,6 +211,7 @@
         'MoinMoin.script.import',
         'MoinMoin.script.old',
         'MoinMoin.script.old.accounts',
+        'MoinMoin.script.old.migration',
         'MoinMoin.script.old.reducewiki',
         'MoinMoin.script.old.unicode',
         'MoinMoin.script.old.xmlrpc-tools',