changeset 953:326f7a365007

search/indexing: remove all_revs=False params (it is the default)
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Sun, 09 Oct 2011 21:10:52 +0200
parents 308ba8de39c4
children f79d471667b9
files MoinMoin/apps/admin/views.py MoinMoin/apps/frontend/views.py MoinMoin/apps/misc/views.py MoinMoin/converter/include.py MoinMoin/datastruct/backends/wiki_groups.py MoinMoin/items/__init__.py MoinMoin/script/maint/reduce_revisions.py MoinMoin/script/maint/set_meta.py MoinMoin/script/migration/moin19/import19.py MoinMoin/storage/middleware/_tests/test_indexing.py MoinMoin/storage/middleware/_tests/test_protecting.py MoinMoin/storage/middleware/indexing.py MoinMoin/storage/middleware/protecting.py MoinMoin/user.py
diffstat 14 files changed, 32 insertions(+), 32 deletions(-)
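
This changeset only drops arguments that restate the default: the storage/indexing middleware already uses all_revs=False unless told otherwise (see, for example, the search() and query_parser() signatures visible in the protecting.py hunk below). A minimal, hypothetical sketch of why the shorter calls are equivalent -- FakeStorage and its return value are illustration only, not MoinMoin code:

    class FakeStorage(object):
        """Stand-in for the storage/indexing middleware (name is an assumption)."""

        def documents(self, all_revs=False, **query):
            # latest revisions only, unless all_revs=True is passed explicitly
            return ('all revisions' if all_revs else 'latest revisions', query)

    storage = FakeStorage()
    explicit = storage.documents(all_revs=False, wikiname='MyWiki')  # old call style
    implicit = storage.documents(wikiname='MyWiki')                  # new call style
    assert explicit == implicit == ('latest revisions', {'wikiname': 'MyWiki'})
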
--- a/MoinMoin/apps/admin/views.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/apps/admin/views.py	Sun Oct 09 21:10:52 2011 +0200
@@ -227,7 +227,7 @@
                 _('Item name'),
                ]
     rows = [(doc[SIZE], doc[NAME])
-            for doc in flaskg.storage.documents(all_revs=False, wikiname=app.cfg.interwikiname)]
+            for doc in flaskg.storage.documents(wikiname=app.cfg.interwikiname)]
     rows = sorted(rows, reverse=True)
     return render_template('admin/itemsize.html',
                            item_name="+admin/itemsize",
--- a/MoinMoin/apps/frontend/views.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/apps/frontend/views.py	Sun Oct 09 21:10:52 2011 +0200
@@ -726,7 +726,7 @@
     """
     q = And([Term(WIKINAME, app.cfg.interwikiname),
              Or([Term(ITEMTRANSCLUSIONS, item_name), Term(ITEMLINKS, item_name)])])
-    revs = flaskg.storage.search(q, all_revs=False)
+    revs = flaskg.storage.search(q)
     return [rev.meta[NAME] for rev in revs]
 
 
@@ -766,7 +766,7 @@
     query = Term(WIKINAME, app.cfg.interwikiname)
     if bookmark_time is not None:
         query = And([query, DateRange(MTIME, start=datetime.utcfromtimestamp(bookmark_time), end=None)])
-    revs = flaskg.storage.search(query, all_revs=all_revs, sortedby=[MTIME], reverse=True, limit=1000) # was: all_revs=False
+    revs = flaskg.storage.search(query, all_revs=all_revs, sortedby=[MTIME], reverse=True, limit=1000)
     # Group by date
     history = []
     day_history = namedtuple('day_history', ['day', 'entries'])
@@ -800,7 +800,7 @@
     linked = set()
     transcluded = set()
     existing = set()
-    revs = flaskg.storage.documents(all_revs=False, wikiname=app.cfg.interwikiname)
+    revs = flaskg.storage.documents(wikiname=app.cfg.interwikiname)
     for rev in revs:
         existing.add(rev.meta[NAME])
         linked.update(rev.meta.get(ITEMLINKS, []))
@@ -1508,7 +1508,7 @@
     :rtype: tuple
     :returns: start word, end word, matches dict
     """
-    item_names = [rev.meta[NAME] for rev in flaskg.storage.documents(all_revs=False, wikiname=app.cfg.interwikiname)]
+    item_names = [rev.meta[NAME] for rev in flaskg.storage.documents(wikiname=app.cfg.interwikiname)]
     if item_name in item_names:
         item_names.remove(item_name)
     # Get matches using wiki way, start and end of word
@@ -1674,7 +1674,7 @@
     show a list or tag cloud of all tags in this wiki
     """
     item_name = request.values.get('item_name', '') # actions menu puts it into qs
-    revs = flaskg.storage.documents(all_revs=False, wikiname=app.cfg.interwikiname)
+    revs = flaskg.storage.documents(wikiname=app.cfg.interwikiname)
     tags_counts = {}
     for rev in revs:
         tags = rev.meta.get(TAGS, [])
@@ -1711,7 +1711,7 @@
     show all items' names that have tag <tag>
     """
     query = And([Term(WIKINAME, app.cfg.interwikiname), Term(TAGS, tag), ])
-    revs = flaskg.storage.search(query, all_revs=False, sortedby=NAME_EXACT, limit=None)
+    revs = flaskg.storage.search(query, sortedby=NAME_EXACT, limit=None)
     item_names = [rev.meta[NAME] for rev in revs]
     return render_template("item_link_list.html",
                            headline=_("Items tagged with %(tag)s", tag=tag),
--- a/MoinMoin/apps/misc/views.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/apps/misc/views.py	Sun Oct 09 21:10:52 2011 +0200
@@ -29,7 +29,7 @@
         return dt.strftime("%Y-%m-%dT%H:%M:%S+00:00")
 
     sitemap = []
-    for rev in flaskg.storage.documents(all_revs=False, wikiname=app.cfg.interwikiname):
+    for rev in flaskg.storage.documents(wikiname=app.cfg.interwikiname):
         name = rev.meta[NAME]
         mtime = rev.meta[MTIME]
         if False: # was: wikiutil.isSystemItem(name)   XXX add back later, when we have that in the index
@@ -45,7 +45,7 @@
         sitemap.append((name, format_timestamp(mtime), changefreq, priority))
     # add an entry for root url
     root_item = app.cfg.item_root
-    revs = list(flaskg.storage.documents(all_revs=False, wikiname=app.cfg.interwikiname, name=root_item))
+    revs = list(flaskg.storage.documents(wikiname=app.cfg.interwikiname, name=root_item))
     if revs:
         mtime = revs[0].meta[MTIME]
         sitemap.append((u'', format_timestamp(mtime), "hourly", "1.0"))
@@ -64,7 +64,7 @@
     See: http://usemod.com/cgi-bin/mb.pl?SisterSitesImplementationGuide
     """
     # XXX we currently also get deleted items, fix this
-    item_names = sorted([rev.meta[NAME] for rev in flaskg.storage.documents(all_revs=False, wikiname=app.cfg.interwikiname)])
+    item_names = sorted([rev.meta[NAME] for rev in flaskg.storage.documents(wikiname=app.cfg.interwikiname)])
     content = render_template('misc/urls_names.txt', item_names=item_names)
     return Response(content, mimetype='text/plain')
 
--- a/MoinMoin/converter/include.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/converter/include.py	Sun Oct 09 21:10:52 2011 +0200
@@ -209,7 +209,7 @@
                         xp_include_pages = xp_include_pages[1:]
                     query = And([Term(WIKINAME, app.cfg.interwikiname), Wildcard(NAME_EXACT, xp_include_pages)])
                     reverse = xp_include_sort == 'descending'
-                    results = flaskg.storage.search(query, all_revs=False, sortedby=NAME_EXACT, reverse=reverse, limit=None)
+                    results = flaskg.storage.search(query, sortedby=NAME_EXACT, reverse=reverse, limit=None)
                     pagelist = [result[NAME] for result in results]
                     if xp_include_skipitems is not None:
                         pagelist = pagelist[xp_include_skipitems:]
--- a/MoinMoin/datastruct/backends/wiki_groups.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/datastruct/backends/wiki_groups.py	Sun Oct 09 21:10:52 2011 +0200
@@ -41,7 +41,7 @@
         To find group pages, app.cfg.cache.item_group_regexact pattern is used.
         """
         # TODO: use whoosh to search for group_regex matching items
-        item_list = [rev.name for rev in flaskg.unprotected_storage.documents(all_revs=False)
+        item_list = [rev.name for rev in flaskg.unprotected_storage.documents()
                      if self.item_group_regex.search(rev.name)]
         return iter(item_list)
 
--- a/MoinMoin/items/__init__.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/items/__init__.py	Sun Oct 09 21:10:52 2011 +0200
@@ -530,7 +530,7 @@
             query = Term(WIKINAME, app.cfg.interwikiname)
         # We only want the sub-item part of the item names, not the whole item objects.
         prefix_len = len(prefix)
-        revs = flaskg.storage.search(query, all_revs=False, sortedby=NAME_EXACT, limit=None)
+        revs = flaskg.storage.search(query, sortedby=NAME_EXACT, limit=None)
         items = [(rev.meta[NAME], rev.meta[NAME][prefix_len:], rev.meta[CONTENTTYPE])
                  for rev in revs]
         return items
@@ -672,7 +672,7 @@
         if contenttype is not None:
             terms.append(Term(CONTENTTYPE, contenttype))
         query = And(terms)
-        revs = flaskg.storage.search(query, all_revs=False, sortedby=NAME_EXACT, limit=None)
+        revs = flaskg.storage.search(query, sortedby=NAME_EXACT, limit=None)
         return [rev.meta[NAME] for rev in revs]
 
     def do_modify(self, contenttype, template_name):
--- a/MoinMoin/script/maint/reduce_revisions.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/script/maint/reduce_revisions.py	Sun Oct 09 21:10:52 2011 +0200
@@ -26,11 +26,11 @@
     def run(self, query):
         storage = app.unprotected_storage
         if query:
-            qp = storage.query_parser([NAME_EXACT, ], all_revs=False)
+            qp = storage.query_parser([NAME_EXACT, ])
             q = qp.parse(query)
         else:
             q = Every()
-        results = storage.search(q, all_revs=False, limit=None)
+        results = storage.search(q, limit=None)
         for result in results:
             item_name = result[NAME]
             item = storage.get_item(item_name)
--- a/MoinMoin/script/maint/set_meta.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/script/maint/set_meta.py	Sun Oct 09 21:10:52 2011 +0200
@@ -44,11 +44,11 @@
                   "only a key you want to delete (with -r set).")
 
         if query:
-            qp = storage.query_parser([NAME_EXACT, ], all_revs=False)
+            qp = storage.query_parser([NAME_EXACT, ])
             q = qp.parse(query)
         else:
             q = Every()
-        results = storage.search(q, all_revs=False, limit=None)
+        results = storage.search(q, limit=None)
         for result in results:
             item_name = result[NAME]
             item = storage.get_item(item_name)
--- a/MoinMoin/script/migration/moin19/import19.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/script/migration/moin19/import19.py	Sun Oct 09 21:10:52 2011 +0200
@@ -90,7 +90,7 @@
         indexer.rebuild()
 
         print "Fix userids..."
-        userid_map = dict([(rev.meta[UID_OLD], rev.meta[ITEMID]) for rev in indexer.documents(all_revs=False, contenttype=CONTENTTYPE_USER)])
+        userid_map = dict([(rev.meta[UID_OLD], rev.meta[ITEMID]) for rev in indexer.documents(contenttype=CONTENTTYPE_USER)])
         for revid in backend:
             meta, data = backend.retrieve(revid)
             if USERID in meta:
--- a/MoinMoin/storage/middleware/_tests/test_indexing.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/storage/middleware/_tests/test_indexing.py	Sun Oct 09 21:10:52 2011 +0200
@@ -151,7 +151,7 @@
         item = self.imw[item_name]
         item.store_revision(dict(name=item_name), StringIO('1st'))
         expected_rev = item.store_revision(dict(name=item_name), StringIO('2nd'))
-        revs = list(self.imw.documents(all_revs=False, name=item_name))
+        revs = list(self.imw.documents(name=item_name))
         assert len(revs) == 1  # there is only 1 latest revision
         assert expected_rev.revid == revs[0].revid  # it is really the latest one
 
@@ -195,13 +195,13 @@
         expected_latest_revids.append(r.revid)
 
         # now we remember the index contents built that way:
-        expected_latest_revs = list(self.imw.documents(all_revs=False))
+        expected_latest_revs = list(self.imw.documents())
         expected_all_revs = list(self.imw.documents(all_revs=True))
 
         print "*** all on-the-fly:"
         self.imw.dump(all_revs=True)
         print "*** latest on-the-fly:"
-        self.imw.dump(all_revs=False)
+        self.imw.dump()
 
         # now kill the index and do a full rebuild
         self.imw.close()
@@ -212,13 +212,13 @@
 
         # read the index contents built that way:
         all_revs = list(self.imw.documents(all_revs=True))
-        latest_revs = list(self.imw.documents(all_revs=False))
+        latest_revs = list(self.imw.documents())
         latest_revids = [rev.revid for rev in latest_revs]
 
         print "*** all rebuilt:"
         self.imw.dump(all_revs=True)
         print "*** latest rebuilt:"
-        self.imw.dump(all_revs=False)
+        self.imw.dump()
 
         # should be all the same, order does not matter:
         assert sorted(expected_all_revs) == sorted(all_revs)
@@ -279,7 +279,7 @@
 
         # read the index contents we have now:
         all_revids = [doc[REVID] for doc in self.imw._documents(all_revs=True)]
-        latest_revids = [doc[REVID] for doc in self.imw._documents(all_revs=False)]
+        latest_revids = [doc[REVID] for doc in self.imw._documents()]
 
         # this index is outdated:
         for missing_revid in missing_revids:
@@ -293,7 +293,7 @@
 
         # read the index contents we have now:
         all_revids = [rev.revid for rev in self.imw.documents(all_revs=True)]
-        latest_revids = [rev.revid for rev in self.imw.documents(all_revs=False)]
+        latest_revids = [rev.revid for rev in self.imw.documents()]
 
         # now it should have the previously missing rev and all should be as expected:
         for missing_revid in missing_revids:
@@ -358,7 +358,7 @@
         item = self.imw[item_name]
         r = item.store_revision(dict(name=item_name, acl=u'joe:read'), StringIO('public content'))
         revid_public = r.revid
-        revids = [rev.revid for rev in self.imw.documents(all_revs=False)]
+        revids = [rev.revid for rev in self.imw.documents()]
         assert revids == [revid_public]
 
     def test_getitem(self):
--- a/MoinMoin/storage/middleware/_tests/test_protecting.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/storage/middleware/_tests/test_protecting.py	Sun Oct 09 21:10:52 2011 +0200
@@ -65,7 +65,7 @@
 
     def test_documents(self):
         revid_unprotected, revid_protected = self.make_items(u'joe:read', u'boss:read')
-        revids = [rev.revid for rev in self.imw.documents(all_revs=False)]
+        revids = [rev.revid for rev in self.imw.documents()]
         assert revids == [revid_unprotected]  # without revid_protected!
 
     def test_getitem(self):
--- a/MoinMoin/storage/middleware/indexing.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/storage/middleware/indexing.py	Sun Oct 09 21:10:52 2011 +0200
@@ -695,7 +695,7 @@
         self.backend = self.indexer.backend
         if latest_doc is None:
             # we need to call the method without acl check to avoid endless recursion:
-            latest_doc = self.indexer._document(all_revs=False, **query) or {}
+            latest_doc = self.indexer._document(**query) or {}
         self._current = latest_doc
 
     def _get_itemid(self):
@@ -795,7 +795,7 @@
         meta[REVID] = revid
         self.indexer.index_revision(meta, content)
         if not overwrite:
-            self._current = self.indexer._document(all_revs=False, revid=revid)
+            self._current = self.indexer._document(revid=revid)
         return Revision(self, revid)
 
     def store_all_revisions(self, meta, data):
--- a/MoinMoin/storage/middleware/protecting.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/storage/middleware/protecting.py	Sun Oct 09 21:10:52 2011 +0200
@@ -50,7 +50,7 @@
         return self.indexer.get_index(all_revs=all_revs)
 
     def query_parser(self, default_fields, all_revs=False):
-        return self.indexer.query_parser(default_fields, all_revs=False)
+        return self.indexer.query_parser(default_fields, all_revs=all_revs)
 
     def search(self, q, all_revs=False, **kw):
         for rev in self.indexer.search(q, all_revs, **kw):
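
Note that the protecting.py hunk above is slightly more than a cleanup: query_parser() previously discarded its all_revs argument by hardcoding all_revs=False, and now forwards it to the underlying indexer. A simplified sketch of the corrected delegation (class and attribute names assumed from the surrounding diff, not copied verbatim):

    class ProtectingMiddleware(object):
        """Simplified stand-in; only the delegation relevant here is shown."""

        def __init__(self, indexer):
            self.indexer = indexer

        def query_parser(self, default_fields, all_revs=False):
            # before this changeset: all_revs=False was passed regardless of the caller
            return self.indexer.query_parser(default_fields, all_revs=all_revs)
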
--- a/MoinMoin/user.py	Sun Oct 09 19:14:56 2011 +0200
+++ b/MoinMoin/user.py	Sun Oct 09 21:10:52 2011 +0200
@@ -106,7 +106,7 @@
         CONTENTTYPE: CONTENTTYPE_USER,
     })
     backend = get_user_backend()
-    docs = backend.documents(all_revs=False, **q)
+    docs = backend.documents(**q)
     return list(docs)