changeset 921:45e286183872

abstraction work on search engine index & cleanups
author Franz Pletz <fpletz AT franz-pletz DOT org>
date Wed, 28 Jun 2006 23:41:46 +0200
parents a2498260eca5
children 63e0bd0eea98
files MoinMoin/action/fullsearch.py MoinMoin/script/index/build.py MoinMoin/search/Xapian.py MoinMoin/search/builtin.py docs/CHANGES.fpletz
diffstat 5 files changed, 364 insertions(+), 339 deletions(-)
--- a/MoinMoin/action/fullsearch.py	Tue Jun 27 15:20:08 2006 +0200
+++ b/MoinMoin/action/fullsearch.py	Wed Jun 28 23:41:46 2006 +0200
@@ -56,9 +56,10 @@
         return
 
     # search the pages
-    from MoinMoin import search
-    results = search.searchPages(request, needle, case=case,
-            regex=regex, titlesearch=titlesearch)
+    from MoinMoin.search import searchPages, QueryParser
+    query = QueryParser(case=case, regex=regex,
+            titlesearch=titlesearch).parse_query(needle)
+    results = searchPages(request, query)
 
     # directly show a single hit
     # XXX won't work with attachment search
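
A minimal sketch of the new two-step call pattern introduced above; the
'request' and 'needle' names are placeholders for the real request object
and search string:

    from MoinMoin.search import searchPages, QueryParser

    # parse the user's search string once ...
    parser = QueryParser(case=False, regex=False, titlesearch=True)
    query = parser.parse_query(needle)
    # ... then hand the parsed query to the search engine
    results = searchPages(request, query)
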
--- a/MoinMoin/script/index/build.py	Tue Jun 27 15:20:08 2006 +0200
+++ b/MoinMoin/script/index/build.py	Wed Jun 28 23:41:46 2006 +0200
@@ -38,7 +38,7 @@
     """ Xapian index build script class """
 
     def command(self):
-        from MoinMoin.Xapian import Index
+        from MoinMoin.search.Xapian import Index
         Index(self.request).indexPages(self.files, self.options.mode)
         #Index(self.request).test(self.request)
 
--- a/MoinMoin/search/Xapian.py	Tue Jun 27 15:20:08 2006 +0200
+++ b/MoinMoin/search/Xapian.py	Wed Jun 28 23:41:46 2006 +0200
@@ -8,7 +8,7 @@
 """
 debug = True
 
-import sys, os, re, codecs, errno, time
+import sys, os, re, codecs, time
 from pprint import pprint
 
 import xapian
@@ -19,7 +19,7 @@
 
 from MoinMoin.Page import Page
 from MoinMoin import config, wikiutil
-from MoinMoin.util import filesys, lock
+from MoinMoin.search.builtin import BaseIndex
 
 try:
     # PyStemmer, snowball python bindings from http://snowball.tartarus.org/
@@ -130,122 +130,7 @@
 ### Indexing
 #############################################################################
 
-class UpdateQueue:
-    def __init__(self, file, lock_dir):
-        self.file = file
-        self.writeLock = lock.WriteLock(lock_dir, timeout=10.0)
-        self.readLock = lock.ReadLock(lock_dir, timeout=10.0)
-
-    def exists(self):
-        return os.path.exists(self.file)
-
-    def append(self, pagename):
-        """ Append a page to queue """
-        if not self.writeLock.acquire(60.0):
-            request.log("can't add %r to xapian update queue: can't lock queue" %
-                        pagename)
-            return
-        try:
-            f = codecs.open(self.file, 'a', config.charset)
-            try:
-                f.write(pagename + "\n")
-            finally:
-                f.close()
-        finally:
-            self.writeLock.release()
-
-    def pages(self):
-        """ Return list of pages in the queue """
-        if self.readLock.acquire(1.0):
-            try:
-                return self._decode(self._read())
-            finally:
-                self.readLock.release()
-        return []
-
-    def remove(self, pages):
-        """ Remove pages from the queue
-        
-        When the queue is empty, the queue file is removed, so exists()
-        can tell if there is something waiting in the queue.
-        """
-        if self.writeLock.acquire(30.0):
-            try:
-                queue = self._decode(self._read())
-                for page in pages:
-                    try:
-                        queue.remove(page)
-                    except ValueError:
-                        pass
-                if queue:
-                    self._write(queue)
-                else:
-                    self._removeFile()
-                return True
-            finally:
-                self.writeLock.release()
-        return False
-
-    # Private -------------------------------------------------------
-
-    def _decode(self, data):
-        """ Decode queue data """
-        pages = data.splitlines()
-        return self._filterDuplicates(pages)
-
-    def _filterDuplicates(self, pages):
-        """ Filter duplicates in page list, keeping the order """
-        unique = []
-        seen = {}
-        for name in pages:
-            if not name in seen:
-                unique.append(name)
-                seen[name] = 1
-        return unique
-
-    def _read(self):
-        """ Read and return queue data
-        
-        This does not do anything with the data so we can release the
-        lock as soon as possible, enabling others to update the queue.
-        """
-        try:
-            f = codecs.open(self.file, 'r', config.charset)
-            try:
-                return f.read()
-            finally:
-                f.close()
-        except (OSError, IOError), err:
-            if err.errno != errno.ENOENT:
-                raise
-            return ''
-
-    def _write(self, pages):
-        """ Write pages to queue file
-        
-        Requires queue write locking.
-        """
-        # XXX use tmpfile/move for atomic replace on real operating systems
-        data = '\n'.join(pages) + '\n'
-        f = codecs.open(self.file, 'w', config.charset)
-        try:
-            f.write(data)
-        finally:
-            f.close()
-
-    def _removeFile(self):
-        """ Remove queue file 
-        
-        Requires queue write locking.
-        """
-        try:
-            os.remove(self.file)
-        except OSError, err:
-            if err.errno != errno.ENOENT:
-                raise
-
-
-class Index:
+class Index(BaseIndex):
     indexValueMap = {
         # mapping the value names we can easily fetch from the index to
         # integers required by xapian. 0 and 1 are reserved by xapwrap!
@@ -280,27 +165,8 @@
                        #Y   year (four digits)
     }
 
-    class LockedException(Exception):
-        pass
-    
     def __init__(self, request):
-        self.request = request
-        cache_dir = request.cfg.cache_dir
-        main_dir = self._main_dir()
-        self.dir = os.path.join(main_dir, 'index')
-        filesys.makeDirs(self.dir)
-        self.sig_file = os.path.join(main_dir, 'complete')
-        lock_dir = os.path.join(main_dir, 'index-lock')
-        self.lock = lock.WriteLock(lock_dir,
-                                   timeout=3600.0, readlocktimeout=60.0)
-        self.read_lock = lock.ReadLock(lock_dir, timeout=3600.0)
-        self.queue = UpdateQueue(os.path.join(main_dir, 'update-queue'),
-                                 os.path.join(main_dir, 'update-queue-lock'))
-
-        # Disabled until we have a sane way to build the index with a
-        # queue in small steps.
-        ## if not self.exists():
-        ##    self.indexPagesInNewThread(request)
+        BaseIndex.__init__(self, request)
 
         # Check if we should and can stem words
         if request.cfg.xapian_stemming and not Stemmer:
@@ -313,13 +179,6 @@
         else:
             return os.path.join(self.request.cfg.cache_dir, 'xapian')
 
-    def exists(self):
-        """ Check if index exists """        
-        return os.path.exists(self.sig_file)
-                
-    def mtime(self):
-        return os.path.getmtime(self.dir)
-
     def _search(self, query):
         """ read lock must be acquired """
         while True:
@@ -339,142 +198,24 @@
         self.request.cfg.xapian_searchers.append((searcher, timestamp))
         return hits
     
-    def search(self, query):
-        if not self.read_lock.acquire(1.0):
-            raise self.LockedException
-        try:
-            hits = self._search(query)
-        finally:
-            self.read_lock.release()
-        return hits
-
-    def update_page(self, page):
-        self.queue.append(page.page_name)
-        self._do_queued_updates_InNewThread()
-
-    def indexPages(self, files=None, mode='update'):
-        """ Index all pages (and files, if given)
-        
-        Can be called only from a script. To index pages during a user
-        request, use indexPagesInNewThread.
-        @arg files: iterator or list of files to index additionally
-        """
-        if not self.lock.acquire(1.0):
-            self.request.log("can't index: can't acquire lock")
-            return
-        try:
-            request = self._indexingRequest(self.request)
-            self._index_pages(request, None, files, mode)
-        finally:
-            self.lock.release()
-    
-    def indexPagesInNewThread(self, files=None, mode='update'):
-        """ Index all pages in a new thread
-        
-        Should be called from a user request. From a script, use indexPages.
-        """
-        if not self.lock.acquire(1.0):
-            self.request.log("can't index: can't acquire lock")
-            return
-        try:
-            # Prevent rebuilding the index just after it was finished
-            if self.exists():
-                self.lock.release()
-                return
-            from threading import Thread
-            indexThread = Thread(target=self._index_pages,
-                args=(self._indexingRequest(self.request), self.lock, files, mode))
-            indexThread.setDaemon(True)
-            
-            # Join the index thread after current request finish, prevent
-            # Apache CGI from killing the process.
-            def joinDecorator(finish):
-                def func():
-                    finish()
-                    indexThread.join()
-                return func
-
-            self.request.finish = joinDecorator(self.request.finish)
-            indexThread.start()
-        except:
-            self.lock.release()
-            raise
-
-    def optimize(self):
-        pass
-
-    # Private ----------------------------------------------------------------
+    def _do_queued_updates(self, request, amount=5):
+        """ Assumes that the write lock is acquired """
+        writer = xapidx.Index(self.dir, True)
+        writer.configure(self.prefixMap, self.indexValueMap)
+        try:
+            pages = self.queue.pages()[:amount]
+            for name in pages:
+                p = Page(request, name)
+                self._index_page(writer, p, mode='update')
+                self.queue.remove([name])
+        finally:
+            # make sure the index is closed even if indexing a page fails
+            writer.close()
 
-    def _do_queued_updates_InNewThread(self):
-        """ do queued index updates in a new thread
-        
-        Should be called from a user request. From a script, use indexPages.
-        """
-        if not self.lock.acquire(1.0):
-            self.request.log("can't index: can't acquire lock")
-            return
-        try:
-            from threading import Thread
-            indexThread = Thread(target=self._do_queued_updates,
-                args=(self._indexingRequest(self.request), self.lock))
-            indexThread.setDaemon(True)
-            
-            # Join the index thread after current request finish, prevent
-            # Apache CGI from killing the process.
-            def joinDecorator(finish):
-                def func():
-                    finish()
-                    indexThread.join()
-                return func
-                
-            self.request.finish = joinDecorator(self.request.finish)
-            indexThread.start()
-        except:
-            self.lock.release()
-            raise
-
-    def _do_queued_updates(self, request, lock=None, amount=5):
-        """ Assumes that the write lock is acquired """
-        try:
-            writer = xapidx.Index(self.dir, True)
-            writer.configure(self.prefixMap, self.indexValueMap)
-            pages = self.queue.pages()[:amount]
-            for name in pages:
-                p = Page(request, name)
-                self._index_page(writer, p, mode='update')
-                self.queue.remove([name])
-        finally:
-            writer.close()
-            if lock:
-                lock.release()
-
-    def contentfilter(self, filename):
-        """ Get a filter for content of filename and return unicode content. """
-        request = self.request
-        mt = wikiutil.MimeType(filename=filename)
-        for modulename in mt.module_name():
-            try:
-                execute = wikiutil.importPlugin(request.cfg, 'filter', modulename)
-                break
-            except wikiutil.PluginMissingError:
-                pass
-            else:
-                request.log("Cannot load filter for mimetype." + modulename)
-        try:
-            data = execute(self, filename)
-            if debug:
-                request.log("Filter %s returned %d characters for file %s" % (modulename, len(data), filename))
-        except (OSError, IOError), err:
-            data = ''
-            request.log("Filter %s threw error '%s' for file %s" % (modulename, str(err), filename))
-        return mt.mime_type(), data
-   
-    def test(self, request):
-        idx = xapidx.ReadOnlyIndex(self.dir)
-        idx.configure(self.prefixMap, self.indexValueMap)
-        print idx.search("is")
-        #for d in docs:
-        #    request.log("%r %r %r" % (d, d.get('attachment'), d.get('pagename')))
+    # XXX: why?
+    #def test(self, request):
+    #   idx = xapidx.ReadOnlyIndex(self.dir)
+    #   idx.configure(self.prefixMap, self.indexValueMap)
+    #   print idx.search("is")
+    #   #for d in docs:
+    #   #    request.log("%r %r %r" % (d, d.get('attachment'), d.get('pagename')))
 
     def _index_file(self, request, writer, filename, mode='update'):
         """ index a file as it were a page named pagename
@@ -665,9 +406,8 @@
                     if debug: request.log("%s (add)" % (pagename,))
                     id = writer.index(doc)
         #writer.flush()
-        
 
-    def _index_pages(self, request, lock=None, files=None, mode='update'):
+    def _index_pages(self, request, files=None, mode='update'):
         """ Index all pages (and all given files)
         
         This should be called from indexPages or indexPagesInNewThread only!
@@ -679,8 +419,6 @@
         and this method must release it when it finishes or fails.
         """
         try:
-            self._unsign()
-            start = time.time()
             writer = xapidx.Index(self.dir, True)
             writer.configure(self.prefixMap, self.indexValueMap)
             pages = request.rootpage.getPageList(user='', exists=1)
@@ -694,50 +432,8 @@
                     fname = fname.strip()
                     self._index_file(request, writer, fname, mode)
             writer.close()
-            request.log("indexing completed successfully in %0.2f seconds." %
-                        (time.time() - start))
-            self._sign()
         finally:
             writer.__del__()
-            if lock:
-                lock.release()
-
-    def _optimize(self, request):
-        """ Optimize the index """
-        pass
-
-    def _indexingRequest(self, request):
-        """ Return a new request that can be used for index building.
-        
-        This request uses a security policy that lets the current user
-        read any page. Without this policy some pages will not render,
-        which will create broken pagelinks index.        
-        """
-        from MoinMoin.request.CLI import Request
-        from MoinMoin.security import Permissions
-        request = Request(request.url)
-        class SecurityPolicy(Permissions):
-            def read(*args, **kw):
-                return True        
-        request.user.may = SecurityPolicy(request.user)
-        return request
-
-    def _unsign(self):
-        """ Remove sig file - assume write lock acquired """
-        try:
-            os.remove(self.sig_file)
-        except OSError, err:
-            if err.errno != errno.ENOENT:
-                raise
-
-    def _sign(self):
-        """ Add sig file - assume write lock acquired """
-        f = file(self.sig_file, 'w')
-        try:
-            f.write('')
-        finally:
-            f.close()
-
 
 def run_query(query, db):
     enquire = xapian.Enquire(db)
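
With this diff, Index keeps only the Xapian-specific pieces; queueing,
locking, and signing now live in BaseIndex (see the builtin.py diff
below). A hypothetical sketch of what another backend would need to
supply -- the class is illustrative, not part of this changeset, but the
hook names are the ones BaseIndex leaves unimplemented:

    from MoinMoin.search.builtin import BaseIndex

    class MyIndex(BaseIndex):
        def _main_dir(self):
            # directory this backend keeps its index in (placeholder path)
            return '/path/to/cache/myindex'

        def _search(self, query):
            # backend-specific search; the read lock is already held
            return []

        def _index_pages(self, request, files=None, mode='update'):
            # (re)index all pages and any extra files
            pass

        def _do_queued_updates(self, request, amount=5):
            # apply up to 'amount' queued page updates; write lock is held
            pass
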
--- a/MoinMoin/search/builtin.py	Tue Jun 27 15:20:08 2006 +0200
+++ b/MoinMoin/search/builtin.py	Wed Jun 28 23:41:46 2006 +0200
@@ -10,16 +10,336 @@
     @license: GNU GPL, see COPYING for details
 """
 
-import time, sys
+import time, sys, os, errno, codecs
 from MoinMoin import wikiutil, config
 from MoinMoin.Page import Page
+from MoinMoin.util import filesys, lock
 from MoinMoin.search.results import getSearchResults
 
-try:
-    from MoinMoin.search import Xapian
-except ImportError:
-    pass
+##############################################################################
+# Search Engine Abstraction
+##############################################################################
 
+class UpdateQueue:
+    def __init__(self, file, lock_dir):
+        self.file = file
+        self.writeLock = lock.WriteLock(lock_dir, timeout=10.0)
+        self.readLock = lock.ReadLock(lock_dir, timeout=10.0)
+
+    def exists(self):
+        return os.path.exists(self.file)
+
+    def append(self, pagename):
+        """ Append a page to queue """
+        if not self.writeLock.acquire(60.0):
+            # no request object is available here, so the failure cannot
+            # be logged; the queued update is silently dropped
+            return
+        try:
+            f = codecs.open(self.file, 'a', config.charset)
+            try:
+                f.write(pagename + "\n")
+            finally:
+                f.close()
+        finally:
+            self.writeLock.release()
+
+    def pages(self):
+        """ Return list of pages in the queue """
+        if self.readLock.acquire(1.0):
+            try:
+                return self._decode(self._read())
+            finally:
+                self.readLock.release()
+        return []
+
+    def remove(self, pages):
+        """ Remove pages from the queue
+        
+        When the queue is empty, the queue file is removed, so exists()
+        can tell if there is something waiting in the queue.
+        """
+        if self.writeLock.acquire(30.0):
+            try:
+                queue = self._decode(self._read())
+                for page in pages:
+                    try:
+                        queue.remove(page)
+                    except ValueError:
+                        pass
+                if queue:
+                    self._write(queue)
+                else:
+                    self._removeFile()
+                return True
+            finally:
+                self.writeLock.release()
+        return False
+
+    # Private -------------------------------------------------------
+
+    def _decode(self, data):
+        """ Decode queue data """
+        pages = data.splitlines()
+        return self._filterDuplicates(pages)
+
+    def _filterDuplicates(self, pages):
+        """ Filter duplicates in page list, keeping the order """
+        unique = []
+        seen = {}
+        for name in pages:
+            if not name in seen:
+                unique.append(name)
+                seen[name] = 1
+        return unique
+
+    def _read(self):
+        """ Read and return queue data
+        
+        This does not do anything with the data so we can release the
+        lock as soon as possible, enabling others to update the queue.
+        """
+        try:
+            f = codecs.open(self.file, 'r', config.charset)
+            try:
+                return f.read()
+            finally:
+                f.close()
+        except (OSError, IOError), err:
+            if err.errno != errno.ENOENT:
+                raise
+            return ''
+
+    def _write(self, pages):
+        """ Write pages to queue file
+        
+        Requires queue write locking.
+        """
+        # XXX use tmpfile/move for atomic replace on real operating systems
+        data = '\n'.join(pages) + '\n'
+        f = codecs.open(self.file, 'w', config.charset)
+        try:
+            f.write(data)
+        finally:
+            f.close()
+
+    def _removeFile(self):
+        """ Remove queue file 
+        
+        Requires queue write locking.
+        """
+        try:
+            os.remove(self.file)
+        except OSError, err:
+            if err.errno != errno.ENOENT:
+                raise
+
+class BaseIndex:
+    class LockedException(Exception):
+        pass
+
+    def __init__(self, request):
+        self.request = request
+        main_dir = self._main_dir()
+        self.dir = os.path.join(main_dir, 'index')
+        filesys.makeDirs(self.dir)
+        self.sig_file = os.path.join(main_dir, 'complete')
+        lock_dir = os.path.join(main_dir, 'index-lock')
+        self.lock = lock.WriteLock(lock_dir,
+                                   timeout=3600.0, readlocktimeout=60.0)
+        self.read_lock = lock.ReadLock(lock_dir, timeout=3600.0)
+        self.queue = UpdateQueue(os.path.join(main_dir, 'update-queue'),
+                                 os.path.join(main_dir, 'update-queue-lock'))
+
+        # Disabled until we have a sane way to build the index with a
+        # queue in small steps.
+        ## if not self.exists():
+        ##    self.indexPagesInNewThread(request)
+
+    def _main_dir(self):
+        raise NotImplementedError
+
+    def exists(self):
+        """ Check if index exists """        
+        return os.path.exists(self.sig_file)
+                
+    def mtime(self):
+        return os.path.getmtime(self.dir)
+    
+    def _search(self, query):
+        raise NotImplementedError
+
+    def search(self, query):
+        if not self.read_lock.acquire(1.0):
+            raise self.LockedException
+        try:
+            hits = self._search(query)
+        finally:
+            self.read_lock.release()
+        return hits
+
+    def update_page(self, page):
+        self.queue.append(page.page_name)
+        self._do_queued_updates_InNewThread()
+
+    def indexPages(self, files=None, mode='update'):
+        """ Index all pages (and files, if given)
+        
+        Can be called only from a script. To index pages during a user
+        request, use indexPagesInNewThread.
+        @arg files: iterator or list of files to index additionally
+        """
+        if not self.lock.acquire(1.0):
+            self.request.log("can't index: can't acquire lock")
+            return
+        try:
+            self._unsign()
+            start = time.time()
+            request = self._indexingRequest(self.request)
+            self._index_pages(request, files, mode)
+            request.log("indexing completed successfully in %0.2f seconds." %
+                        (time.time() - start))
+            self._sign()
+        finally:
+            self.lock.release()
+
+    def indexPagesInNewThread(self, files=None, mode='update'):
+        """ Index all pages in a new thread
+        
+        Should be called from a user request. From a script, use indexPages.
+        """
+        # Prevent rebuilding the index just after it was finished
+        if self.exists():
+            return
+
+        from threading import Thread
+        indexThread = Thread(target=self._index_pages,
+                args=(self._indexingRequest(self.request), files, mode))
+        indexThread.setDaemon(True)
+        
+        # Join the index thread after current request finish, prevent
+        # Apache CGI from killing the process.
+        def joinDecorator(finish):
+            def func():
+                finish()
+                indexThread.join()
+            return func
+
+        self.request.finish = joinDecorator(self.request.finish)
+        indexThread.start()
+
+    def _index_pages(self, request, files=None, mode='update'):
+        """ Index all pages (and all given files)
+        
+        This should be called from indexPages or indexPagesInNewThread only!
+        
+        This may take some time, depending on the size of the wiki and speed
+        of the machine.
+
+        When called from indexPages, the write lock is already held for
+        the duration of the call.
+        """
+        raise NotImplementedError
+
+    def _do_queued_updates_InNewThread(self):
+        """ do queued index updates in a new thread
+        
+        Should be called from a user request. From a script, use indexPages.
+        """
+        if not self.lock.acquire(1.0):
+            self.request.log("can't index: can't acquire lock")
+            return
+        try:
+            def lockedDecorator(f):
+                # 'self' is taken from the enclosing method scope
+                def func(*args, **kwargs):
+                    try:
+                        return f(*args, **kwargs)
+                    finally:
+                        self.lock.release()
+                return func
+
+            from threading import Thread
+            indexThread = Thread(
+                    target=lockedDecorator(self._do_queued_updates),
+                    args=(self._indexingRequest(self.request),))
+            indexThread.setDaemon(True)
+            
+            # Join the index thread after current request finish, prevent
+            # Apache CGI from killing the process.
+            def joinDecorator(finish):
+                def func():
+                    finish()
+                    indexThread.join()
+                return func
+                
+            self.request.finish = joinDecorator(self.request.finish)
+            indexThread.start()
+        except:
+            self.lock.release()
+            raise
+
+    def _do_queued_updates(self, request, amount=5):
+        raise NotImplementedError
+
+    def optimize(self):
+        raise NotImplementedError
+
+    def contentfilter(self, filename):
+        """ Get a filter for content of filename and return unicode content. """
+        request = self.request
+        mt = wikiutil.MimeType(filename=filename)
+        for modulename in mt.module_name():
+            try:
+                execute = wikiutil.importPlugin(request.cfg, 'filter', modulename)
+                break
+            except wikiutil.PluginMissingError:
+                pass
+        else:
+            # for/else: no break above, i.e. no filter plugin was found
+            request.log("Cannot load filter for mimetype " + modulename)
+        try:
+            data = execute(self, filename)
+            # XXX: proper debugging?
+            #if debug:
+            #    request.log("Filter %s returned %d characters for file %s" % (modulename, len(data), filename))
+        except (OSError, IOError), err:
+            data = ''
+            request.log("Filter %s threw error '%s' for file %s" % (modulename, str(err), filename))
+        return mt.mime_type(), data
+
+    def test(self, request):
+        raise NotImplementedError
+
+    def _indexingRequest(self, request):
+        """ Return a new request that can be used for index building.
+        
+        This request uses a security policy that lets the current user
+        read any page. Without this policy, some pages will not render,
+        which would create a broken pagelinks index.
+        """
+        from MoinMoin.request.CLI import Request
+        from MoinMoin.security import Permissions
+        request = Request(request.url)
+        class SecurityPolicy(Permissions):
+            def read(*args, **kw):
+                return True        
+        request.user.may = SecurityPolicy(request.user)
+        return request
+
+    def _unsign(self):
+        """ Remove sig file - assume write lock acquired """
+        try:
+            os.remove(self.sig_file)
+        except OSError, err:
+            if err.errno != errno.ENOENT:
+                raise
+
+    def _sign(self):
+        """ Add sig file - assume write lock acquired """
+        f = file(self.sig_file, 'w')
+        try:
+            f.write('')
+        finally:
+            f.close()
 
 ##############################################################################
 ### Searching
@@ -60,8 +380,9 @@
         """
         pages = None
         try:
-            index = Xapian.Index(self.request)
-        except NameError:
+            from MoinMoin.search.Xapian import Index
+            index = Index(self.request)
+        except ImportError:
             index = None
         if index and index.exists() and self.query.xapian_wanted():
             self.request.clock.start('_xapianSearch')
@@ -80,7 +401,7 @@
                     return d
                 pages = [dict_decode(hit['values']) for hit in hits]
                 self.request.log("xapianSearch: finds pages: %r" % pages)
-            except index.LockedException:
+            except BaseIndex.LockedException:
                 pass
             self.request.clock.stop('_xapianSearch')
         return self._moinSearch(pages)
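
The hunk above also shows the intended caller pattern for the locked
index: try the index and fall back when it is busy. A condensed sketch,
with 'index' and 'query' as placeholders:

    from MoinMoin.search.builtin import BaseIndex

    try:
        hits = index.search(query)   # raises if the read lock times out
    except BaseIndex.LockedException:
        hits = None                  # fall back, e.g. to _moinSearch
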
--- a/docs/CHANGES.fpletz	Tue Jun 27 15:20:08 2006 +0200
+++ b/docs/CHANGES.fpletz	Wed Jun 28 23:41:46 2006 +0200
@@ -91,5 +91,12 @@
 
 2006-06-27
     * Splitting out MoinMoin/search.py to MoinMoin/search/*.py, no more
-      need to invoke QueryParser manually when using searchPages
+      need to invoke QueryParser manually when using searchPages, minor
+      refactoring
 
+2006-06-28
+    * Abstracted the locked search engine index into
+      MoinMoin.search.builtin.BaseIndex; MoinMoin.search.Xapian.Index is
+      now derived from it. Cleaned up the calling structure and function
+      prototypes to make the indexing code more extensible.
+
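An illustrative use of the UpdateQueue that moved into builtin.py (the
file paths are placeholders): duplicates are filtered on read, and the
queue file disappears once the queue drains, so exists() doubles as an
"updates pending" check.

    from MoinMoin.search.builtin import UpdateQueue

    q = UpdateQueue('/tmp/update-queue', '/tmp/update-queue-lock')
    q.append(u'FrontPage')
    q.append(u'FrontPage')       # duplicate, filtered out by pages()
    print q.pages()              # [u'FrontPage']
    q.remove([u'FrontPage'])     # queue now empty -> file removed
    print q.exists()             # False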