changeset 1638:5070d0358544

caching: move use_pickle to __init__, remove unused encode/decode args from .content()/.update()
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Sun, 22 Oct 2006 20:48:23 +0200
parents 47a173ae5401
children 95efbd8cfa55
files MoinMoin/caching.py MoinMoin/i18n/__init__.py MoinMoin/macro/PageHits.py MoinMoin/user.py MoinMoin/wikidicts.py
diffstat 5 files changed, 30 insertions(+), 33 deletions(-)
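
The practical effect for callers: pickling is now declared once, when the CacheEntry is constructed, instead of being repeated as a keyword argument on every .update()/.content() call, and the charset encode/decode branches are dropped from CacheEntry entirely. A minimal before/after sketch of the call-site migration (arena/key/scope values borrowed from the user.py hunk below; this sketch is illustrative and not part of the changeset itself):

    # before this changeset
    cache = caching.CacheEntry(request, 'user', 'name2id', scope='wiki')
    cache.update(_name2id, use_pickle=True)
    _name2id = cache.content(use_pickle=True)

    # after this changeset
    cache = caching.CacheEntry(request, 'user', 'name2id', scope='wiki',
                               use_pickle=True)
    cache.update(_name2id)
    _name2id = cache.content()
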
--- a/MoinMoin/caching.py	Sun Oct 22 20:31:37 2006 +0200
+++ b/MoinMoin/caching.py	Sun Oct 22 20:48:23 2006 +0200
@@ -31,7 +31,7 @@
     pass
 
 class CacheEntry:
-    def __init__(self, request, arena, key, scope='page_or_wiki', do_locking=True):
+    def __init__(self, request, arena, key, scope='page_or_wiki', do_locking=True, use_pickle=False):
         """ init a cache entry
             @param request: the request object
             @param arena: either a string or a page object, when we want to use
@@ -46,6 +46,7 @@
         self.request = request
         self.key = key
         self.locking = do_locking
+        self.use_pickle = use_pickle
         if scope == 'page_or_wiki': # XXX DEPRECATED, remove later
             if isinstance(arena, str):
                 self.arena_dir = os.path.join(request.cfg.cache_dir, request.cfg.siteid, arena)
@@ -118,13 +119,11 @@
         else:
             self.request.log("Can't acquire write lock in %s" % self.lock_dir)
 
-    def update(self, content, encode=False, use_pickle=False):
+    def update(self, content):
         try:
             tmpfname = self._tmpfilename()
             fname = self._filename()
-            if encode:
-                content = content.encode(config.charset)
-            elif use_pickle:
+            if self.use_pickle:
                 content = pickle.dumps(content, PICKLE_PROTOCOL)
             if not self.locking or self.locking and self.wlock.acquire(1.0):
                 try:
@@ -157,7 +156,7 @@
         else:
             self.request.log("Can't acquire write lock in %s" % self.lock_dir)
 
-    def content(self, decode=False, use_pickle=False):
+    def content(self):
         try:
             if not self.locking or self.locking and self.rlock.acquire(1.0):
                 try:
@@ -169,9 +168,7 @@
                         self.rlock.release()
             else:
                 self.request.log("Can't acquire read lock in %s" % self.lock_dir)
-            if decode:
-                data = data.decode(config.charset)
-            elif use_pickle:
+            if self.use_pickle:
                 data = pickle.loads(data)
             return data
         except (pickle.UnpicklingError, IOError, EOFError, ValueError), err:
--- a/MoinMoin/i18n/__init__.py	Sun Oct 22 20:31:37 2006 +0200
+++ b/MoinMoin/i18n/__init__.py	Sun Oct 22 20:48:23 2006 +0200
@@ -54,10 +54,10 @@
         The very first time, this will be slow as it will load all languages,
         but next time it will be fast due to caching.
     """
-    request.clock.start('i18n_init')
     global languages
+    request.clock.start('i18n_init')
     if languages is None:
-        meta_cache = caching.CacheEntry(request, 'i18n', 'meta', scope='farm')
+        meta_cache = caching.CacheEntry(request, 'i18n', 'meta', scope='farm', use_pickle=True)
         i18n_dir = os.path.join(request.cfg.moinmoin_dir, 'i18n')
         if meta_cache.needsUpdate(i18n_dir):
             _languages = {}
@@ -74,17 +74,17 @@
                     #request.log("meta key %s value %r" % (key, value))
                     _languages[language][key] = value.decode(encoding)
             try:
-                meta_cache.update(_languages, use_pickle=True)
+                meta_cache.update(_languages)
             except caching.CacheError:
                 pass
 
-    if languages is None: # another thread maybe has done it before us
-        try:
-            _languages = meta_cache.content(use_pickle=True)
-            if languages is None:
-                languages = _languages
-        except caching.CacheError:
-            pass
+        if languages is None: # another thread maybe has done it before us
+            try:
+                _languages = meta_cache.content()
+                if languages is None:
+                    languages = _languages
+            except caching.CacheError:
+                pass
     request.clock.stop('i18n_init')
 
 
@@ -164,14 +164,14 @@
 
     def loadLanguage(self, request):
         request.clock.start('loadLanguage')
-        cache = caching.CacheEntry(request, arena='i18n', key=self.language, scope='farm')
+        cache = caching.CacheEntry(request, arena='i18n', key=self.language, scope='farm', use_pickle=True)
         langfilename = po_filename(request, self.language, self.domain)
         needsupdate = cache.needsUpdate(langfilename)
         if debug:
             request.log("i18n: langfilename %s needsupdate %d" % (langfilename, needsupdate))
         if not needsupdate:
             try:
-                uc_texts, uc_unformatted = cache.content(use_pickle=True)
+                uc_texts, uc_unformatted = cache.content()
             except caching.CacheError:
                 if debug:
                     request.log("i18n: pickle %s load failed" % lang)
@@ -202,7 +202,7 @@
             if debug:
                 request.log("i18n: dumping lang %s" % lang)
             try:
-                cache.update((uc_texts, uc_unformatted), use_pickle=True)
+                cache.update((uc_texts, uc_unformatted))
             except caching.CacheError:
                 pass
 
--- a/MoinMoin/macro/PageHits.py	Sun Oct 22 20:31:37 2006 +0200
+++ b/MoinMoin/macro/PageHits.py	Sun Oct 22 20:48:23 2006 +0200
@@ -17,7 +17,7 @@
     def __init__(self, macro, args):
         self.macro = macro
         self.request = macro.request
-        self.cache = cache = caching.CacheEntry(self.request, 'charts', 'pagehits', scope='wiki')
+        self.cache = cache = caching.CacheEntry(self.request, 'charts', 'pagehits', scope='wiki', use_pickle=True)
 
     def execute(self):
         """ Execute the macro and return output """
@@ -34,7 +34,7 @@
         date, hits = 0, {}
         if self.cache.exists():
             try:
-                date, hits = self.cache.content(use_pickle=True)
+                date, hits = self.cache.content()
             except caching.CacheError:
                 self.cache.remove()
         return date, hits
@@ -62,7 +62,7 @@
 
     def updateCache(self, date, hits):
         try:
-            self.cache.update((date, hits), use_pickle=True)
+            self.cache.update((date, hits))
         except caching.CacheError:
             pass
 
--- a/MoinMoin/user.py	Sun Oct 22 20:31:37 2006 +0200
+++ b/MoinMoin/user.py	Sun Oct 22 20:48:23 2006 +0200
@@ -52,9 +52,9 @@
     except AttributeError:
         arena = 'user'
         key = 'name2id'
-        cache = caching.CacheEntry(request, arena, key, scope='wiki')
+        cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
         try:
-            _name2id = cache.content(use_pickle=True)
+            _name2id = cache.content()
         except caching.CacheError:
             _name2id = {}
         cfg.cache.name2id = _name2id
@@ -66,9 +66,9 @@
         cfg.cache.name2id = _name2id
         arena = 'user'
         key = 'name2id'
-        cache = caching.CacheEntry(request, arena, key, scope='wiki')
+        cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
         try:
-            cache.update(_name2id, use_pickle=True)
+            cache.update(_name2id)
         except caching.CacheError:
             pass
         id = _name2id.get(searchName, None)
--- a/MoinMoin/wikidicts.py	Sun Oct 22 20:31:37 2006 +0200
+++ b/MoinMoin/wikidicts.py	Sun Oct 22 20:48:23 2006 +0200
@@ -315,8 +315,8 @@
             self.__dict__.update(self.cfg.cache.DICTS_DATA)
         except AttributeError:
             try:
-                cache = caching.CacheEntry(request, arena, key, scope='wiki')
-                data = cache.content(use_pickle=True)
+                cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
+                data = cache.content()
                 self.__dict__.update(data)
 
                 # invalidate the cache if the pickle version changed
@@ -401,8 +401,8 @@
             for name in self.groupdict:
                 self.dictdict[name].expandgroups(self)
 
-            cache = caching.CacheEntry(request, arena, key, scope='wiki')
-            cache.update(data, use_pickle=True)
+            cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
+            cache.update(data)
 
         # remember it (persistent environments)
         self.cfg.cache.DICTS_DATA = data