comparison MoinMoin/support/werkzeug/contrib/cache.py @ 6094:9f12f41504fc

upgrade werkzeug from 0.8.3 to 0.11.11; no other changes. Does not work like this, see next commit.
author Thomas Waldmann <tw AT waldmann-edv DOT de>
date Mon, 05 Sep 2016 23:25:59 +0200
parents fc1f97a47c7e
children 7f12cf241d5e
comparison
equal deleted inserted replaced
6089:dfbc455e2c46 6094:9f12f41504fc
51 51
52 Please keep in mind that you have to create the cache and put it somewhere 52 Please keep in mind that you have to create the cache and put it somewhere
53 you have access to it (either as a module global you can import or you just 53 you have access to it (either as a module global you can import or you just
54 put it into your WSGI application). 54 put it into your WSGI application).
55 55
56 :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details. 56 :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
57 :license: BSD, see LICENSE for more details. 57 :license: BSD, see LICENSE for more details.
58 """ 58 """
59 import os 59 import os
60 import re 60 import re
61 import errno
61 import tempfile 62 import tempfile
62 try: 63 from hashlib import md5
63 from hashlib import md5
64 except ImportError:
65 from md5 import new as md5
66 from itertools import izip
67 from time import time 64 from time import time
68 from werkzeug.posixemulation import rename
69
70 try: 65 try:
71 import cPickle as pickle 66 import cPickle as pickle
72 except ImportError: 67 except ImportError: # pragma: no cover
73 import pickle 68 import pickle
69
70 from werkzeug._compat import iteritems, string_types, text_type, \
71 integer_types, to_native
72 from werkzeug.posixemulation import rename
74 73
75 74
76 def _items(mappingorseq): 75 def _items(mappingorseq):
77 """Wrapper for efficient iteration over mappings represented by dicts 76 """Wrapper for efficient iteration over mappings represented by dicts
78 or sequences:: 77 or sequences::
82 81
83 >>> for k, v in _items(dict((i, i*i) for i in xrange(5))): 82 >>> for k, v in _items(dict((i, i*i) for i in xrange(5))):
84 ... assert k*k == v 83 ... assert k*k == v
85 84
86 """ 85 """
87 return mappingorseq.iteritems() if hasattr(mappingorseq, 'iteritems') \ 86 if hasattr(mappingorseq, 'items'):
88 else mappingorseq 87 return iteritems(mappingorseq)
88 return mappingorseq
89 89
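As a quick illustration of the rewritten helper (values below are purely illustrative), a dict is routed through ``iteritems`` while any other sequence of pairs is passed through untouched::

    _items({'a': 1, 'b': 2})         # dict -> iterator of (key, value) pairs
    _items([('a', 1), ('b', 2)])     # a plain sequence is returned unchanged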
90 90
91 class BaseCache(object): 91 class BaseCache(object):
92
92 """Baseclass for the cache systems. All the cache systems implement this 93 """Baseclass for the cache systems. All the cache systems implement this
93 API or a superset of it. 94 API or a superset of it.
94 95
95 :param default_timeout: the default timeout that is used if no timeout is 96 :param default_timeout: the default timeout (in seconds) that is used if no
96 specified on :meth:`set`. 97 timeout is specified on :meth:`set`. A timeout of 0
98 indicates that the cache never expires.
97 """ 99 """
98 100
99 def __init__(self, default_timeout=300): 101 def __init__(self, default_timeout=300):
100 self.default_timeout = default_timeout 102 self.default_timeout = default_timeout
101 103
102 def get(self, key): 104 def get(self, key):
103 """Looks up key in the cache and returns the value for it. 105 """Look up key in the cache and return the value for it.
104 If the key does not exist `None` is returned instead.
105 106
106 :param key: the key to be looked up. 107 :param key: the key to be looked up.
108 :returns: The value if it exists and is readable, else ``None``.
107 """ 109 """
108 return None 110 return None
109 111
110 def delete(self, key): 112 def delete(self, key):
111 """Deletes `key` from the cache. If it does not exist in the cache 113 """Delete `key` from the cache.
112 nothing happens.
113 114
114 :param key: the key to delete. 115 :param key: the key to delete.
115 """ 116 :returns: Whether the key existed and has been deleted.
116 pass 117 :rtype: boolean
118 """
119 return True
117 120
118 def get_many(self, *keys): 121 def get_many(self, *keys):
119 """Returns a list of values for the given keys. 122 """Returns a list of values for the given keys.
120 For each key an item in the list is created. Example:: 123 For each key an item in the list is created::
121 124
122 foo, bar = cache.get_many("foo", "bar") 125 foo, bar = cache.get_many("foo", "bar")
123 126
124 If a key can't be looked up `None` is returned for that key 127 Has the same error handling as :meth:`get`.
125 instead.
126 128
127 :param keys: The function accepts multiple keys as positional 129 :param keys: The function accepts multiple keys as positional
128 arguments. 130 arguments.
129 """ 131 """
130 return map(self.get, keys) 132 return map(self.get, keys)
131 133
132 def get_dict(self, *keys): 134 def get_dict(self, *keys):
133 """Works like :meth:`get_many` but returns a dict:: 135 """Like :meth:`get_many` but return a dict::
134 136
135 d = cache.get_dict("foo", "bar") 137 d = cache.get_dict("foo", "bar")
136 foo = d["foo"] 138 foo = d["foo"]
137 bar = d["bar"] 139 bar = d["bar"]
138 140
139 :param keys: The function accepts multiple keys as positional 141 :param keys: The function accepts multiple keys as positional
140 arguments. 142 arguments.
141 """ 143 """
142 return dict(izip(keys, self.get_many(*keys))) 144 return dict(zip(keys, self.get_many(*keys)))
143 145
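A minimal usage sketch of the multi-key helpers, assuming a ``SimpleCache`` instance (keys and values here are illustrative)::

    from werkzeug.contrib.cache import SimpleCache

    cache = SimpleCache()
    cache.set_many({'foo': 1, 'bar': 2})
    foo, bar = cache.get_many('foo', 'bar')                    # foo == 1, bar == 2
    assert cache.get_dict('foo', 'bar') == {'foo': 1, 'bar': 2}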
144 def set(self, key, value, timeout=None): 146 def set(self, key, value, timeout=None):
145 """Adds a new key/value to the cache (overwrites value, if key already 147 """Add a new key/value to the cache (overwrites value, if key already
146 exists in the cache). 148 exists in the cache).
147 149
148 :param key: the key to set 150 :param key: the key to set
149 :param value: the value for the key 151 :param value: the value for the key
150 :param timeout: the cache timeout for the key (if not specified, 152 :param timeout: the cache timeout for the key (if not specified,
151 it uses the default timeout). 153 it uses the default timeout). A timeout of 0 indicates
152 """ 154 that the cache never expires.
153 pass 155 :returns: ``True`` if key has been updated, ``False`` for backend
156 errors. Pickling errors, however, will raise a subclass of
157 ``pickle.PickleError``.
158 :rtype: boolean
159 """
160 return True
154 161
155 def add(self, key, value, timeout=None): 162 def add(self, key, value, timeout=None):
156 """Works like :meth:`set` but does not overwrite the values of already 163 """Works like :meth:`set` but does not overwrite the values of already
157 existing keys. 164 existing keys.
158 165
159 :param key: the key to set 166 :param key: the key to set
160 :param value: the value for the key 167 :param value: the value for the key
161 :param timeout: the cache timeout for the key or the default 168 :param timeout: the cache timeout for the key or the default
162 timeout if not specified. 169 timeout if not specified. A timeout of 0 indicates
163 """ 170 that the cache never expires.
164 pass 171 :returns: Same as :meth:`set`, but also ``False`` for already
172 existing keys.
173 :rtype: boolean
174 """
175 return True
165 176
166 def set_many(self, mapping, timeout=None): 177 def set_many(self, mapping, timeout=None):
167 """Sets multiple keys and values from a mapping. 178 """Sets multiple keys and values from a mapping.
168 179
169 :param mapping: a mapping with the keys/values to set. 180 :param mapping: a mapping with the keys/values to set.
170 :param timeout: the cache timeout for the key (if not specified, 181 :param timeout: the cache timeout for the key (if not specified,
171 it uses the default timeout). 182 it uses the default timeout). A timeout of 0
172 """ 183 indicates tht the cache never expires.
184 :returns: Whether all given keys have been set.
185 :rtype: boolean
186 """
187 rv = True
173 for key, value in _items(mapping): 188 for key, value in _items(mapping):
174 self.set(key, value, timeout) 189 if not self.set(key, value, timeout):
190 rv = False
191 return rv
175 192
176 def delete_many(self, *keys): 193 def delete_many(self, *keys):
177 """Deletes multiple keys at once. 194 """Deletes multiple keys at once.
178 195
179 :param keys: The function accepts multiple keys as positional 196 :param keys: The function accepts multiple keys as positional
180 arguments. 197 arguments.
181 """ 198 :returns: Whether all given keys have been deleted.
182 for key in keys: 199 :rtype: boolean
183 self.delete(key) 200 """
201 return all(self.delete(key) for key in keys)
202
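Since most mutating calls now report success as a boolean, callers can check the result; a sketch with ``SimpleCache`` (key names are illustrative)::

    from werkzeug.contrib.cache import SimpleCache

    cache = SimpleCache()
    assert cache.set('key', 'value') is True     # stored
    assert cache.add('key', 'other') is False    # already present, not overwritten
    assert cache.delete('key') is True           # existed and was removed
    assert cache.delete('key') is False          # nothing left to delete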
203 def has(self, key):
204 """Checks if a key exists in the cache without returning it. This is a
205 cheap operation that bypasses loading the actual data on the backend.
206
207 This method is optional and may not be implemented on all caches.
208
209 :param key: the key to check
210 """
211 raise NotImplementedError(
212 '%s doesn\'t have an efficient implementation of `has`. That '
213 'means it is impossible to check whether a key exists without '
214 'fully loading the key\'s data. Consider using `self.get` '
215 'explicitly if you don\'t care about performance.'
216 )
184 217
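The new ``has`` hook only works on backends that implement it; a sketch with ``SimpleCache``, which does::

    from werkzeug.contrib.cache import SimpleCache

    cache = SimpleCache()
    cache.set('x', 1)
    assert cache.has('x') is True
    assert cache.has('missing') is False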
185 def clear(self): 218 def clear(self):
186 """Clears the cache. Keep in mind that not all caches support 219 """Clears the cache. Keep in mind that not all caches support
187 completely clearing the cache. 220 completely clearing the cache.
188 """ 221 :returns: Whether the cache has been cleared.
189 pass 222 :rtype: boolean
223 """
224 return True
190 225
191 def inc(self, key, delta=1): 226 def inc(self, key, delta=1):
192 """Increments the value of a key by `delta`. If the key does 227 """Increments the value of a key by `delta`. If the key does
193 not yet exist it is initialized with `delta`. 228 not yet exist it is initialized with `delta`.
194 229
195 For supporting caches this is an atomic operation. 230 For supporting caches this is an atomic operation.
196 231
197 :param key: the key to increment. 232 :param key: the key to increment.
198 :param delta: the delta to add. 233 :param delta: the delta to add.
199 """ 234 :returns: The new value or ``None`` for backend errors.
200 self.set(key, (self.get(key) or 0) + delta) 235 """
236 value = (self.get(key) or 0) + delta
237 return value if self.set(key, value) else None
201 238
202 def dec(self, key, delta=1): 239 def dec(self, key, delta=1):
203 """Decrements the value of a key by `delta`. If the key does 240 """Decrements the value of a key by `delta`. If the key does
204 not yet exist it is initialized with `-delta`. 241 not yet exist it is initialized with `-delta`.
205 242
206 For supporting caches this is an atomic operation. 243 For supporting caches this is an atomic operation.
207 244
208 :param key: the key to increment. 245 :param key: the key to increment.
209 :param delta: the delta to subtract. 246 :param delta: the delta to subtract.
210 """ 247 :returns: The new value or `None` for backend errors.
211 self.set(key, (self.get(key) or 0) - delta) 248 """
249 value = (self.get(key) or 0) - delta
250 return value if self.set(key, value) else None
212 251
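Sketch of the counter helpers and their new return values (shown with ``SimpleCache``; other backends may return ``None`` on errors)::

    from werkzeug.contrib.cache import SimpleCache

    cache = SimpleCache()
    assert cache.inc('hits') == 1       # missing key is initialized with the delta
    assert cache.inc('hits', 5) == 6
    assert cache.dec('hits', 2) == 4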
213 252
214 class NullCache(BaseCache): 253 class NullCache(BaseCache):
254
215 """A cache that doesn't cache. This can be useful for unit testing. 255 """A cache that doesn't cache. This can be useful for unit testing.
216 256
217 :param default_timeout: a dummy parameter that is ignored but exists 257 :param default_timeout: a dummy parameter that is ignored but exists
218 for API compatibility with other caches. 258 for API compatibility with other caches.
219 """ 259 """
220 260
221 261
222 class SimpleCache(BaseCache): 262 class SimpleCache(BaseCache):
263
223 """Simple memory cache for single process environments. This class exists 264 """Simple memory cache for single process environments. This class exists
224 mainly for the development server and is not 100% thread safe. It tries 265 mainly for the development server and is not 100% thread safe. It tries
225 to use as many atomic operations as possible and no locks for simplicity 266 to use as many atomic operations as possible and no locks for simplicity
226 but it could happen under heavy load that keys are added multiple times. 267 but it could happen under heavy load that keys are added multiple times.
227 268
228 :param threshold: the maximum number of items the cache stores before 269 :param threshold: the maximum number of items the cache stores before
229 it starts deleting some. 270 it starts deleting some.
230 :param default_timeout: the default timeout that is used if no timeout is 271 :param default_timeout: the default timeout that is used if no timeout is
231 specified on :meth:`~BaseCache.set`. 272 specified on :meth:`~BaseCache.set`. A timeout of
273 0 indicates that the cache never expires.
232 """ 274 """
233 275
234 def __init__(self, threshold=500, default_timeout=300): 276 def __init__(self, threshold=500, default_timeout=300):
235 BaseCache.__init__(self, default_timeout) 277 BaseCache.__init__(self, default_timeout)
236 self._cache = {} 278 self._cache = {}
238 self._threshold = threshold 280 self._threshold = threshold
239 281
240 def _prune(self): 282 def _prune(self):
241 if len(self._cache) > self._threshold: 283 if len(self._cache) > self._threshold:
242 now = time() 284 now = time()
285 toremove = []
243 for idx, (key, (expires, _)) in enumerate(self._cache.items()): 286 for idx, (key, (expires, _)) in enumerate(self._cache.items()):
244 if expires <= now or idx % 3 == 0: 287 if (expires != 0 and expires <= now) or idx % 3 == 0:
245 self._cache.pop(key, None) 288 toremove.append(key)
246 289 for key in toremove:
247 def get(self, key): 290 self._cache.pop(key, None)
248 now = time() 291
249 expires, value = self._cache.get(key, (0, None)) 292 def _get_expiration(self, timeout):
250 if expires > time():
251 return pickle.loads(value)
252
253 def set(self, key, value, timeout=None):
254 if timeout is None: 293 if timeout is None:
255 timeout = self.default_timeout 294 timeout = self.default_timeout
295 if timeout > 0:
296 timeout = time() + timeout
297 return timeout
298
299 def get(self, key):
300 try:
301 expires, value = self._cache[key]
302 if expires == 0 or expires > time():
303 return pickle.loads(value)
304 except (KeyError, pickle.PickleError):
305 return None
306
307 def set(self, key, value, timeout=None):
308 expires = self._get_expiration(timeout)
256 self._prune() 309 self._prune()
257 self._cache[key] = (time() + timeout, pickle.dumps(value, 310 self._cache[key] = (expires, pickle.dumps(value,
258 pickle.HIGHEST_PROTOCOL)) 311 pickle.HIGHEST_PROTOCOL))
312 return True
259 313
260 def add(self, key, value, timeout=None): 314 def add(self, key, value, timeout=None):
261 if timeout is None: 315 expires = self._get_expiration(timeout)
262 timeout = self.default_timeout 316 self._prune()
263 if len(self._cache) > self._threshold: 317 item = (expires, pickle.dumps(value,
264 self._prune() 318 pickle.HIGHEST_PROTOCOL))
265 item = (time() + timeout, pickle.dumps(value, 319 if key in self._cache:
266 pickle.HIGHEST_PROTOCOL)) 320 return False
267 self._cache.setdefault(key, item) 321 self._cache.setdefault(key, item)
322 return True
268 323
269 def delete(self, key): 324 def delete(self, key):
270 self._cache.pop(key, None) 325 return self._cache.pop(key, None) is not None
271 326
327 def has(self, key):
328 try:
329 expires, value = self._cache[key]
330 return expires == 0 or expires > time()
331 except KeyError:
332 return False
272 333
273 _test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match 334 _test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match
274 335
336
275 class MemcachedCache(BaseCache): 337 class MemcachedCache(BaseCache):
338
276 """A cache that uses memcached as backend. 339 """A cache that uses memcached as backend.
277 340
278 The first argument can either be an object that resembles the API of a 341 The first argument can either be an object that resembles the API of a
279 :class:`memcache.Client` or a tuple/list of server addresses. In the 342 :class:`memcache.Client` or a tuple/list of server addresses. In the
280 event that a tuple/list is passed, Werkzeug tries to import the best 343 event that a tuple/list is passed, Werkzeug tries to import the best
281 available memcache library. 344 available memcache library.
345
346 This cache looks into the following packages/modules to find bindings for
347 memcached:
348
349 - ``pylibmc``
350 - ``google.appengine.api.memcached``
351 - ``memcached``
282 352
283 Implementation notes: This cache backend works around some limitations in 353 Implementation notes: This cache backend works around some limitations in
284 memcached to simplify the interface. For example unicode keys are encoded 354 memcached to simplify the interface. For example unicode keys are encoded
285 to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return 355 to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return
286 the keys in the same format as passed. Furthermore all get methods 356 the keys in the same format as passed. Furthermore all get methods
288 is passed to the get methods which is often the case in web applications. 358 is passed to the get methods which is often the case in web applications.
289 359
290 :param servers: a list or tuple of server addresses or alternatively 360 :param servers: a list or tuple of server addresses or alternatively
291 a :class:`memcache.Client` or a compatible client. 361 a :class:`memcache.Client` or a compatible client.
292 :param default_timeout: the default timeout that is used if no timeout is 362 :param default_timeout: the default timeout that is used if no timeout is
293 specified on :meth:`~BaseCache.set`. 363 specified on :meth:`~BaseCache.set`. A timeout of
364 0 indicates that the cache never expires.
294 :param key_prefix: a prefix that is added before all keys. This makes it 365 :param key_prefix: a prefix that is added before all keys. This makes it
295 possible to use the same memcached server for different 366 possible to use the same memcached server for different
296 applications. Keep in mind that 367 applications. Keep in mind that
297 :meth:`~BaseCache.clear` will also clear keys with a 368 :meth:`~BaseCache.clear` will also clear keys with a
298 different prefix. 369 different prefix.
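A minimal construction sketch (server address, prefix and keys are illustrative; one of the memcache bindings listed above must be installed)::

    from werkzeug.contrib.cache import MemcachedCache

    cache = MemcachedCache(servers=['127.0.0.1:11211'], key_prefix='myapp/')
    cache.set('user:1', {'name': 'alice'}, timeout=60)
    user = cache.get('user:1')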
309 else: 380 else:
310 # NOTE: servers is actually an already initialized memcache 381 # NOTE: servers is actually an already initialized memcache
311 # client. 382 # client.
312 self._client = servers 383 self._client = servers
313 384
314 self.key_prefix = key_prefix 385 self.key_prefix = to_native(key_prefix)
315 386
316 def get(self, key): 387 def _normalize_key(self, key):
317 if isinstance(key, unicode): 388 key = to_native(key, 'utf-8')
318 key = key.encode('utf-8')
319 if self.key_prefix: 389 if self.key_prefix:
320 key = self.key_prefix + key 390 key = self.key_prefix + key
391 return key
392
393 def _normalize_timeout(self, timeout):
394 if timeout is None:
395 timeout = self.default_timeout
396 if timeout > 0:
397 timeout = int(time()) + timeout
398 return timeout
399
400 def get(self, key):
401 key = self._normalize_key(key)
321 # memcached doesn't support keys longer than that. Because often 402 # memcached doesn't support keys longer than that. Because often
322 # checks for so long keys can occour because it's tested from user 403 # checks for so long keys can occur because it's tested from user
323 # submitted data etc we fail silently for getting. 404 # submitted data etc we fail silently for getting.
324 if _test_memcached_key(key): 405 if _test_memcached_key(key):
325 return self._client.get(key) 406 return self._client.get(key)
326 407
327 def get_dict(self, *keys): 408 def get_dict(self, *keys):
328 key_mapping = {} 409 key_mapping = {}
329 have_encoded_keys = False 410 have_encoded_keys = False
330 for key in keys: 411 for key in keys:
331 if isinstance(key, unicode): 412 encoded_key = self._normalize_key(key)
332 encoded_key = key.encode('utf-8') 413 if not isinstance(key, str):
333 have_encoded_keys = True 414 have_encoded_keys = True
334 else:
335 encoded_key = key
336 if self.key_prefix:
337 encoded_key = self.key_prefix + encoded_key
338 if _test_memcached_key(key): 415 if _test_memcached_key(key):
339 key_mapping[encoded_key] = key 416 key_mapping[encoded_key] = key
340 d = rv = self._client.get_multi(key_mapping.keys()) 417 d = rv = self._client.get_multi(key_mapping.keys())
341 if have_encoded_keys or self.key_prefix: 418 if have_encoded_keys or self.key_prefix:
342 rv = {} 419 rv = {}
343 for key, value in d.iteritems(): 420 for key, value in iteritems(d):
344 rv[key_mapping[key]] = value 421 rv[key_mapping[key]] = value
345 if len(rv) < len(keys): 422 if len(rv) < len(keys):
346 for key in keys: 423 for key in keys:
347 if key not in rv: 424 if key not in rv:
348 rv[key] = None 425 rv[key] = None
349 return rv 426 return rv
350 427
351 def add(self, key, value, timeout=None): 428 def add(self, key, value, timeout=None):
352 if timeout is None: 429 key = self._normalize_key(key)
353 timeout = self.default_timeout 430 timeout = self._normalize_timeout(timeout)
354 if isinstance(key, unicode): 431 return self._client.add(key, value, timeout)
355 key = key.encode('utf-8')
356 if self.key_prefix:
357 key = self.key_prefix + key
358 self._client.add(key, value, timeout)
359 432
360 def set(self, key, value, timeout=None): 433 def set(self, key, value, timeout=None):
361 if timeout is None: 434 key = self._normalize_key(key)
362 timeout = self.default_timeout 435 timeout = self._normalize_timeout(timeout)
363 if isinstance(key, unicode): 436 return self._client.set(key, value, timeout)
364 key = key.encode('utf-8')
365 if self.key_prefix:
366 key = self.key_prefix + key
367 self._client.set(key, value, timeout)
368 437
369 def get_many(self, *keys): 438 def get_many(self, *keys):
370 d = self.get_dict(*keys) 439 d = self.get_dict(*keys)
371 return [d[key] for key in keys] 440 return [d[key] for key in keys]
372 441
373 def set_many(self, mapping, timeout=None): 442 def set_many(self, mapping, timeout=None):
374 if timeout is None:
375 timeout = self.default_timeout
376 new_mapping = {} 443 new_mapping = {}
377 for key, value in _items(mapping): 444 for key, value in _items(mapping):
378 if isinstance(key, unicode): 445 key = self._normalize_key(key)
379 key = key.encode('utf-8')
380 if self.key_prefix:
381 key = self.key_prefix + key
382 new_mapping[key] = value 446 new_mapping[key] = value
383 self._client.set_multi(new_mapping, timeout) 447
448 timeout = self._normalize_timeout(timeout)
449 failed_keys = self._client.set_multi(new_mapping, timeout)
450 return not failed_keys
384 451
385 def delete(self, key): 452 def delete(self, key):
386 if isinstance(key, unicode): 453 key = self._normalize_key(key)
387 key = key.encode('utf-8')
388 if self.key_prefix:
389 key = self.key_prefix + key
390 if _test_memcached_key(key): 454 if _test_memcached_key(key):
391 self._client.delete(key) 455 return self._client.delete(key)
392 456
393 def delete_many(self, *keys): 457 def delete_many(self, *keys):
394 new_keys = [] 458 new_keys = []
395 for key in keys: 459 for key in keys:
396 if isinstance(key, unicode): 460 key = self._normalize_key(key)
397 key = key.encode('utf-8')
398 if self.key_prefix:
399 key = self.key_prefix + key
400 if _test_memcached_key(key): 461 if _test_memcached_key(key):
401 new_keys.append(key) 462 new_keys.append(key)
402 self._client.delete_multi(new_keys) 463 return self._client.delete_multi(new_keys)
464
465 def has(self, key):
466 key = self._normalize_key(key)
467 if _test_memcached_key(key):
468 return self._client.append(key, '')
469 return False
403 470
404 def clear(self): 471 def clear(self):
405 self._client.flush_all() 472 return self._client.flush_all()
406 473
407 def inc(self, key, delta=1): 474 def inc(self, key, delta=1):
408 if isinstance(key, unicode): 475 key = self._normalize_key(key)
409 key = key.encode('utf-8') 476 return self._client.incr(key, delta)
410 if self.key_prefix:
411 key = self.key_prefix + key
412 self._client.incr(key, delta)
413 477
414 def dec(self, key, delta=1): 478 def dec(self, key, delta=1):
415 if isinstance(key, unicode): 479 key = self._normalize_key(key)
416 key = key.encode('utf-8') 480 return self._client.decr(key, delta)
417 if self.key_prefix:
418 key = self.key_prefix + key
419 self._client.decr(key, delta)
420 481
421 def import_preferred_memcache_lib(self, servers): 482 def import_preferred_memcache_lib(self, servers):
422 """Returns an initialized memcache client. Used by the constructor.""" 483 """Returns an initialized memcache client. Used by the constructor."""
423 try: 484 try:
424 import pylibmc 485 import pylibmc
445 # backwards compatibility 506 # backwards compatibility
446 GAEMemcachedCache = MemcachedCache 507 GAEMemcachedCache = MemcachedCache
447 508
448 509
449 class RedisCache(BaseCache): 510 class RedisCache(BaseCache):
511
450 """Uses the Redis key-value store as a cache backend. 512 """Uses the Redis key-value store as a cache backend.
451 513
452 The first argument can be either a string denoting address of the Redis 514 The first argument can be either a string denoting address of the Redis
453 server or an object resembling an instance of a redis.Redis class. 515 server or an object resembling an instance of a redis.Redis class.
454 516
461 `key_prefix` was added. 523 `key_prefix` was added.
462 524
463 .. versionchanged:: 0.8 525 .. versionchanged:: 0.8
464 This cache backend now properly serializes objects. 526 This cache backend now properly serializes objects.
465 527
528 .. versionchanged:: 0.8.3
529 This cache backend now supports password authentication.
530
531 .. versionchanged:: 0.10
532 ``**kwargs`` is now passed to the redis object.
533
466 :param host: address of the Redis server or an object which API is 534 :param host: address of the Redis server or an object which API is
467 compatible with the official Python Redis client (redis-py). 535 compatible with the official Python Redis client (redis-py).
468 :param port: port number on which Redis server listens for connections 536 :param port: port number on which Redis server listens for connections.
537 :param password: password authentication for the Redis server.
538 :param db: db (zero-based numeric index) on Redis Server to connect.
469 :param default_timeout: the default timeout that is used if no timeout is 539 :param default_timeout: the default timeout that is used if no timeout is
470 specified on :meth:`~BaseCache.set`. 540 specified on :meth:`~BaseCache.set`. A timeout of
541 0 indicates that the cache never expires.
471 :param key_prefix: A prefix that should be added to all keys. 542 :param key_prefix: A prefix that should be added to all keys.
543
544 Any additional keyword arguments will be passed to ``redis.Redis``.
472 """ 545 """
473 546
474 def __init__(self, host='localhost', port=6379, password=None, 547 def __init__(self, host='localhost', port=6379, password=None,
475 default_timeout=300, key_prefix=None): 548 db=0, default_timeout=300, key_prefix=None, **kwargs):
476 BaseCache.__init__(self, default_timeout) 549 BaseCache.__init__(self, default_timeout)
477 if isinstance(host, basestring): 550 if isinstance(host, string_types):
478 try: 551 try:
479 import redis 552 import redis
480 except ImportError: 553 except ImportError:
481 raise RuntimeError('no redis module found') 554 raise RuntimeError('no redis module found')
482 self._client = redis.Redis(host=host, port=port, password=password) 555 if kwargs.get('decode_responses', None):
556 raise ValueError('decode_responses is not supported by '
557 'RedisCache.')
558 self._client = redis.Redis(host=host, port=port, password=password,
559 db=db, **kwargs)
483 else: 560 else:
484 self._client = host 561 self._client = host
485 self.key_prefix = key_prefix or '' 562 self.key_prefix = key_prefix or ''
563
564 def _get_expiration(self, timeout):
565 if timeout is None:
566 timeout = self.default_timeout
567 if timeout == 0:
568 timeout = -1
569 return timeout
486 570
487 def dump_object(self, value): 571 def dump_object(self, value):
488 """Dumps an object into a string for redis. By default it serializes 572 """Dumps an object into a string for redis. By default it serializes
489 integers as regular string and pickle dumps everything else. 573 integers as regular string and pickle dumps everything else.
490 """ 574 """
491 t = type(value) 575 t = type(value)
492 if t is int or t is long: 576 if t in integer_types:
493 return str(value) 577 return str(value).encode('ascii')
494 return '!' + pickle.dumps(value) 578 return b'!' + pickle.dumps(value)
495 579
496 def load_object(self, value): 580 def load_object(self, value):
497 """The reversal of :meth:`dump_object`. This might be callde with 581 """The reversal of :meth:`dump_object`. This might be called with
498 None. 582 None.
499 """ 583 """
500 if value is None: 584 if value is None:
501 return None 585 return None
502 if value.startswith('!'): 586 if value.startswith(b'!'):
503 return pickle.loads(value[1:]) 587 try:
588 return pickle.loads(value[1:])
589 except pickle.PickleError:
590 return None
504 try: 591 try:
505 return int(value) 592 return int(value)
506 except ValueError: 593 except ValueError:
507 # before 0.8 we did not have serialization. Still support that. 594 # before 0.8 we did not have serialization. Still support that.
508 return value 595 return value
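The serialization helpers never touch the client, so they can be exercised without a server; a sketch (passing ``object()`` as the "client" is purely illustrative)::

    from werkzeug.contrib.cache import RedisCache

    cache = RedisCache(host=object())          # non-string host skips the redis import
    assert cache.dump_object(42) == b'42'      # integers become plain bytes
    blob = cache.dump_object({'a': 1})         # anything else: b'!' + pickle payload
    assert cache.load_object(blob) == {'a': 1}
    assert cache.load_object(b'legacy') == b'legacy'   # pre-0.8 values pass through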
514 if self.key_prefix: 601 if self.key_prefix:
515 keys = [self.key_prefix + key for key in keys] 602 keys = [self.key_prefix + key for key in keys]
516 return [self.load_object(x) for x in self._client.mget(keys)] 603 return [self.load_object(x) for x in self._client.mget(keys)]
517 604
518 def set(self, key, value, timeout=None): 605 def set(self, key, value, timeout=None):
519 if timeout is None: 606 timeout = self._get_expiration(timeout)
520 timeout = self.default_timeout
521 dump = self.dump_object(value) 607 dump = self.dump_object(value)
522 self._client.setex(self.key_prefix + key, dump, timeout) 608 if timeout == -1:
609 result = self._client.set(name=self.key_prefix + key,
610 value=dump)
611 else:
612 result = self._client.setex(name=self.key_prefix + key,
613 value=dump, time=timeout)
614 return result
523 615
524 def add(self, key, value, timeout=None): 616 def add(self, key, value, timeout=None):
525 if timeout is None: 617 timeout = self._get_expiration(timeout)
526 timeout = self.default_timeout
527 dump = self.dump_object(value) 618 dump = self.dump_object(value)
528 added = self._client.setnx(self.key_prefix + key, dump) 619 return (
529 if added: 620 self._client.setnx(name=self.key_prefix + key, value=dump) and
530 self._client.expire(self.key_prefix + key, timeout) 621 self._client.expire(name=self.key_prefix + key, time=timeout)
622 )
531 623
532 def set_many(self, mapping, timeout=None): 624 def set_many(self, mapping, timeout=None):
533 if timeout is None: 625 timeout = self._get_expiration(timeout)
534 timeout = self.default_timeout 626 # Use transaction=False to batch without calling redis MULTI
535 pipe = self._client.pipeline() 627 # which is not supported by twemproxy
628 pipe = self._client.pipeline(transaction=False)
629
536 for key, value in _items(mapping): 630 for key, value in _items(mapping):
537 dump = self.dump_object(value) 631 dump = self.dump_object(value)
538 pipe.setex(self.key_prefix + key, dump, timeout) 632 if timeout == -1:
539 pipe.execute() 633 pipe.set(name=self.key_prefix + key, value=dump)
634 else:
635 pipe.setex(name=self.key_prefix + key, value=dump,
636 time=timeout)
637 return pipe.execute()
540 638
541 def delete(self, key): 639 def delete(self, key):
542 self._client.delete(self.key_prefix + key) 640 return self._client.delete(self.key_prefix + key)
543 641
544 def delete_many(self, *keys): 642 def delete_many(self, *keys):
545 if not keys: 643 if not keys:
546 return 644 return
547 if self.key_prefix: 645 if self.key_prefix:
548 keys = [self.key_prefix + key for key in keys] 646 keys = [self.key_prefix + key for key in keys]
549 self._client.delete(*keys) 647 return self._client.delete(*keys)
648
649 def has(self, key):
650 return self._client.exists(self.key_prefix + key)
550 651
551 def clear(self): 652 def clear(self):
653 status = False
552 if self.key_prefix: 654 if self.key_prefix:
553 keys = self._client.keys(self.key_prefix + '*') 655 keys = self._client.keys(self.key_prefix + '*')
554 if keys: 656 if keys:
555 self._client.delete(*keys) 657 status = self._client.delete(*keys)
556 else: 658 else:
557 self._client.flushdb() 659 status = self._client.flushdb()
660 return status
558 661
559 def inc(self, key, delta=1): 662 def inc(self, key, delta=1):
560 return self._client.incr(self.key_prefix + key, delta) 663 return self._client.incr(name=self.key_prefix + key, amount=delta)
561 664
562 def dec(self, key, delta=1): 665 def dec(self, key, delta=1):
563 return self._client.decr(self.key_prefix + key, delta) 666 return self._client.decr(name=self.key_prefix + key, amount=delta)
564 667
565 668
566 class FileSystemCache(BaseCache): 669 class FileSystemCache(BaseCache):
670
567 """A cache that stores the items on the file system. This cache depends 671 """A cache that stores the items on the file system. This cache depends
568 on being the only user of the `cache_dir`. Make absolutely sure that 672 on being the only user of the `cache_dir`. Make absolutely sure that
569 nobody but this cache stores files there or otherwise the cache will 673 nobody but this cache stores files there or otherwise the cache will
570 randomly delete files therein. 674 randomly delete files therein.
571 675
572 :param cache_dir: the directory where cache files are stored. 676 :param cache_dir: the directory where cache files are stored.
573 :param threshold: the maximum number of items the cache stores before 677 :param threshold: the maximum number of items the cache stores before
574 it starts deleting some. 678 it starts deleting some.
575 :param default_timeout: the default timeout that is used if no timeout is 679 :param default_timeout: the default timeout that is used if no timeout is
576 specified on :meth:`~BaseCache.set`. 680 specified on :meth:`~BaseCache.set`. A timeout of
681 0 indicates that the cache never expires.
577 :param mode: the file mode wanted for the cache files, default 0600 682 :param mode: the file mode wanted for the cache files, default 0600
578 """ 683 """
579 684
580 #: used for temporary files by the FileSystemCache 685 #: used for temporary files by the FileSystemCache
581 _fs_transaction_suffix = '.__wz_cache' 686 _fs_transaction_suffix = '.__wz_cache'
582 687
583 def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0600): 688 def __init__(self, cache_dir, threshold=500, default_timeout=300,
689 mode=0o600):
584 BaseCache.__init__(self, default_timeout) 690 BaseCache.__init__(self, default_timeout)
585 self._path = cache_dir 691 self._path = cache_dir
586 self._threshold = threshold 692 self._threshold = threshold
587 self._mode = mode 693 self._mode = mode
588 if not os.path.exists(self._path): 694
695 try:
589 os.makedirs(self._path) 696 os.makedirs(self._path)
697 except OSError as ex:
698 if ex.errno != errno.EEXIST:
699 raise
590 700
591 def _list_dir(self): 701 def _list_dir(self):
592 """return a list of (fully qualified) cache filenames 702 """return a list of (fully qualified) cache filenames
593 """ 703 """
594 return [os.path.join(self._path, fn) for fn in os.listdir(self._path) 704 return [os.path.join(self._path, fn) for fn in os.listdir(self._path)
596 706
597 def _prune(self): 707 def _prune(self):
598 entries = self._list_dir() 708 entries = self._list_dir()
599 if len(entries) > self._threshold: 709 if len(entries) > self._threshold:
600 now = time() 710 now = time()
601 for idx, fname in enumerate(entries): 711 try:
602 remove = False 712 for idx, fname in enumerate(entries):
603 f = None 713 remove = False
604 try: 714 with open(fname, 'rb') as f:
605 try:
606 f = open(fname, 'rb')
607 expires = pickle.load(f) 715 expires = pickle.load(f)
608 remove = expires <= now or idx % 3 == 0 716 remove = (expires != 0 and expires <= now) or idx % 3 == 0
609 finally: 717
610 if f is not None: 718 if remove:
611 f.close()
612 except Exception:
613 pass
614 if remove:
615 try:
616 os.remove(fname) 719 os.remove(fname)
617 except (IOError, OSError): 720 except (IOError, OSError):
618 pass 721 pass
619 722
620 def clear(self): 723 def clear(self):
621 for fname in self._list_dir(): 724 for fname in self._list_dir():
622 try: 725 try:
623 os.remove(fname) 726 os.remove(fname)
624 except (IOError, OSError): 727 except (IOError, OSError):
625 pass 728 return False
729 return True
626 730
627 def _get_filename(self, key): 731 def _get_filename(self, key):
732 if isinstance(key, text_type):
733 key = key.encode('utf-8') # XXX unicode review
628 hash = md5(key).hexdigest() 734 hash = md5(key).hexdigest()
629 return os.path.join(self._path, hash) 735 return os.path.join(self._path, hash)
630 736
631 def get(self, key): 737 def get(self, key):
632 filename = self._get_filename(key) 738 filename = self._get_filename(key)
633 try: 739 try:
634 f = open(filename, 'rb') 740 with open(filename, 'rb') as f:
635 try: 741 pickle_time = pickle.load(f)
636 if pickle.load(f) >= time(): 742 if pickle_time == 0 or pickle_time >= time():
637 return pickle.load(f) 743 return pickle.load(f)
638 finally: 744 else:
639 f.close() 745 os.remove(filename)
640 os.remove(filename) 746 return None
641 except Exception: 747 except (IOError, OSError, pickle.PickleError):
642 return None 748 return None
643 749
644 def add(self, key, value, timeout=None): 750 def add(self, key, value, timeout=None):
645 filename = self._get_filename(key) 751 filename = self._get_filename(key)
646 if not os.path.exists(filename): 752 if not os.path.exists(filename):
647 self.set(key, value, timeout) 753 return self.set(key, value, timeout)
754 return False
648 755
649 def set(self, key, value, timeout=None): 756 def set(self, key, value, timeout=None):
650 if timeout is None: 757 if timeout is None:
651 timeout = self.default_timeout 758 timeout = int(time() + self.default_timeout)
759 elif timeout != 0:
760 timeout = int(time() + timeout)
652 filename = self._get_filename(key) 761 filename = self._get_filename(key)
653 self._prune() 762 self._prune()
654 try: 763 try:
655 fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix, 764 fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
656 dir=self._path) 765 dir=self._path)
657 f = os.fdopen(fd, 'wb') 766 with os.fdopen(fd, 'wb') as f:
658 try: 767 pickle.dump(timeout, f, 1)
659 pickle.dump(int(time() + timeout), f, 1)
660 pickle.dump(value, f, pickle.HIGHEST_PROTOCOL) 768 pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
661 finally:
662 f.close()
663 rename(tmp, filename) 769 rename(tmp, filename)
664 os.chmod(filename, self._mode) 770 os.chmod(filename, self._mode)
665 except (IOError, OSError): 771 except (IOError, OSError):
666 pass 772 return False
773 else:
774 return True
667 775
668 def delete(self, key): 776 def delete(self, key):
669 try: 777 try:
670 os.remove(self._get_filename(key)) 778 os.remove(self._get_filename(key))
671 except (IOError, OSError): 779 except (IOError, OSError):
672 pass 780 return False
781 else:
782 return True
783
784 def has(self, key):
785 filename = self._get_filename(key)
786 try:
787 with open(filename, 'rb') as f:
788 pickle_time = pickle.load(f)
789 if pickle_time == 0 or pickle_time >= time():
790 return True
791 else:
792 os.remove(filename)
793 return False
794 except (IOError, OSError, pickle.PickleError):
795 return False