"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Basic caching classes and methods
=================================

- Cache - the generic caching object interfacing with the others
- CacheInRam - provides caching in RAM
- CacheOnDisk - provides caching on disk

Memcache is also available via a different module (see gluon.contrib.memcache)

When web2py is running on Google App Engine,
caching is provided by the GAE memcache
(see gluon.contrib.gae_memcache)
"""

import time
import portalocker
import shelve
import thread
import os
import logging
import re

logger = logging.getLogger("web2py.cache")

__all__ = ['Cache']


DEFAULT_TIME_EXPIRE = 300


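# DEFAULT_TIME_EXPIRE is expressed in seconds, so a cached item is considered
# fresh for 5 minutes by default. A minimal usage sketch (here `cache` is the
# Cache instance that web2py builds from this module and `request` is the
# global request object; both are assumptions of the example, not defined in
# this file):
#
#     cache = Cache(request)
#     now = cache.ram('time', lambda: time.ctime(), DEFAULT_TIME_EXPIRE)

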
class CacheAbstract(object):
    """
    Abstract class for cache implementations.
    Its main purpose is now to provide referenced API documentation.

    Use CacheInRam or CacheOnDisk instead, which are derived from this class.
    """

    cache_stats_name = 'web2py_cache_statistics'

    def __init__(self, request=None):
        """
        Parameters
        ----------
        request:
            the global request object
        """
        raise NotImplementedError

    def __call__(self, key, f,
                 time_expire=DEFAULT_TIME_EXPIRE):
        """
        Tries to retrieve the value corresponding to `key` from the cache if
        the object exists and if it has not expired, otherwise it calls the
        function `f` and stores the output in the cache corresponding to
        `key`. In either case the output of the function is returned.

        :param key: the key of the object to be stored or retrieved
        :param f: the function whose output is to be cached
        :param time_expire: expiration of the cache in seconds

        - `time_expire` is used to compare the current time with the time
          when the requested object was last saved in the cache. It does not
          affect future requests.
        - Setting `time_expire` to 0 or to a negative value forces the cache
          to refresh.

        If the function `f` is `None` the cache entry for `key` is cleared.
        """
        raise NotImplementedError

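    # A sketch of the contract every implementation honors (illustrative
    # only; `ram_cache`, `request` and `slow_function` are assumed names, not
    # part of this module):
    #
    #     ram_cache = CacheInRam(request)
    #     v1 = ram_cache('k', slow_function, time_expire=60)  # miss: runs f
    #     v2 = ram_cache('k', slow_function, time_expire=60)  # hit: cached
    #     ram_cache('k', None)                                # clears 'k'
    #     v3 = ram_cache('k', slow_function, time_expire=0)   # forces refresh
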
    def clear(self, regex=None):
        """
        Clears the cache of all keys that match the provided regular expression.
        If no regular expression is provided, it clears all entries in cache.

        Parameters
        ----------
        regex:
            if provided, only keys matching the regex will be cleared.
            Otherwise all keys are cleared.
        """
        raise NotImplementedError

    def increment(self, key, value=1):
        """
        Increments the cached value for the given key by the amount in value

        Parameters
        ----------
        key:
            key for the cached object to be incremented
        value:
            amount of the increment (defaults to 1, can be negative)
        """
        raise NotImplementedError

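    # For example, a simple counter can be kept with (sketch; `counter_cache`
    # stands for any concrete cache instance, an assumed name):
    #
    #     counter_cache.increment('visits')        # adds 1
    #     counter_cache.increment('visits', 10)    # adds 10
    #     counter_cache.increment('visits', -1)    # negative values decrement
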
    def _clear(self, storage, regex):
        """
        Auxiliary function called by `clear` to search and clear cache entries
        """
        r = re.compile(regex)
        for (key, value) in storage.items():
            if r.match(str(key)):
                del storage[key]
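
    # Note that `re.match` anchors at the start of the key, so clearing by a
    # prefix looks like this (sketch; `ram_cache` is an assumed name for a
    # concrete cache instance):
    #
    #     ram_cache.clear(regex='user_')     # drops 'user_1', 'user_2', ...
    #     ram_cache.clear(regex='.*_tmp$')   # regexes can also match suffixes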


class CacheInRam(CacheAbstract):
    """
    Ram based caching

    This is implemented as a global (per process, shared by all threads)
    dictionary.
    A mutex-lock mechanism avoids conflicts.
    """

    locker = thread.allocate_lock()
    meta_storage = {}

    def __init__(self, request=None):
        # one storage dictionary per application, kept in the class-level
        # meta_storage and protected by the class-level lock
        self.locker.acquire()
        self.request = request
        if request:
            app = request.application
        else:
            app = ''
        if not app in self.meta_storage:
            self.storage = self.meta_storage[app] = {
                CacheAbstract.cache_stats_name: {
                    'hit_total': 0,
                    'misses': 0,
                }}
        else:
            self.storage = self.meta_storage[app]
        self.locker.release()

    def clear(self, regex=None):
        # remove matching keys while holding the lock; make sure the
        # statistics entry survives a full clear
        self.locker.acquire()
        storage = self.storage
        if regex is None:
            storage.clear()
        else:
            self._clear(storage, regex)
        if not CacheAbstract.cache_stats_name in storage:
            storage[CacheAbstract.cache_stats_name] = {
                'hit_total': 0,
                'misses': 0,
            }
        self.locker.release()

    def __call__(self, key, f,
                 time_expire=DEFAULT_TIME_EXPIRE):
        """
        Attention! cache.ram does not copy the cached object. It just stores
        a reference to it. It turns out that deepcopying the object would
        have some problems:

        1) it would break backward compatibility
        2) it would be limiting, because people may want to cache live objects
        3) it would only work if we deepcopied both on storage and on
           retrieval, which would make things slow

        Anyway, you can deepcopy explicitly in the function generating the
        value to be cached.
        """

        dt = time_expire

        self.locker.acquire()
        item = self.storage.get(key, None)
        if item and f is None:
            del self.storage[key]
        self.storage[CacheAbstract.cache_stats_name]['hit_total'] += 1
        self.locker.release()

        if f is None:
            return None
        if item and (dt is None or item[0] > time.time() - dt):
            return item[1]
        value = f()

        self.locker.acquire()
        self.storage[key] = (time.time(), value)
        self.storage[CacheAbstract.cache_stats_name]['misses'] += 1
        self.locker.release()
        return value

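    # Because only a reference is stored, callers who need an independent
    # copy can deepcopy inside the cached callable, e.g. (sketch; `cache.ram`
    # is the CacheInRam instance exposed by the Cache class below and
    # `source()` is an assumed factory function, not part of this module):
    #
    #     import copy
    #     value = cache.ram('mykey', lambda: copy.deepcopy(source()), 300)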


class CacheOnDisk(CacheAbstract):
    """
    Disk based cache

    This is implemented as a shelve object and it is shared by multiple web2py
    processes (and threads) as long as they share the same filesystem.
    The file is locked when accessed.

    The disk cache provides persistence when web2py is started/stopped, but it
    is slower than `CacheInRam`.

    Values stored in the disk cache must be picklable.
    """

    speedup_checks = set()

    def __init__(self, request, folder=None):
        self.request = request

        # create the cache folder for this application if it does not exist
        folder = folder or os.path.join(request.folder, 'cache')

        if not os.path.exists(folder):
            os.mkdir(folder)

        # the shelve file holds the cached values; the lock file serialises
        # access to it across processes via portalocker
        self.locker_name = os.path.join(folder, 'cache.lock')
        self.shelve_name = os.path.join(folder, 'cache.shelve')

        locker, locker_locked = None, False
        speedup_key = (folder, CacheAbstract.cache_stats_name)
        if not speedup_key in self.speedup_checks or \
                not os.path.exists(self.shelve_name):
            try:
                locker = open(self.locker_name, 'a')
                portalocker.lock(locker, portalocker.LOCK_EX)
                locker_locked = True
                storage = shelve.open(self.shelve_name)
                try:
                    if not storage.has_key(CacheAbstract.cache_stats_name):
                        storage[CacheAbstract.cache_stats_name] = {
                            'hit_total': 0,
                            'misses': 0,
                        }
                    storage.sync()
                finally:
                    storage.close()
                self.speedup_checks.add(speedup_key)
            except ImportError:
                pass
            except:
                logger.error('corrupted file %s, deleting it!' \
                             % self.shelve_name)
                os.unlink(self.shelve_name)
            if locker_locked:
                portalocker.unlock(locker)
            if locker:
                locker.close()

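    # Typical use mirrors cache.ram but survives restarts (sketch; `cache` is
    # the Cache instance that web2py builds from this module, an assumption
    # of the example):
    #
    #     stamp = cache.disk('today', lambda: time.ctime(), time_expire=3600)
    #
    # Only picklable values can be stored here; live objects such as open
    # file handles or database connections belong in cache.ram, if anywhere.
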
    def clear(self, regex=None):
        # lock the cache file, drop the matching keys from the shelve and
        # keep the statistics entry available for future calls
        locker = open(self.locker_name, 'a')
        portalocker.lock(locker, portalocker.LOCK_EX)
        storage = shelve.open(self.shelve_name)
        try:
            if regex is None:
                storage.clear()
            else:
                self._clear(storage, regex)
            if not storage.has_key(CacheAbstract.cache_stats_name):
                storage[CacheAbstract.cache_stats_name] = {
                    'hit_total': 0,
                    'misses': 0,
                }
            storage.sync()
        finally:
            storage.close()
        portalocker.unlock(locker)
        locker.close()



class Cache(object):
    """
    Sets up generic caching, creating an instance of both CacheInRam and
    CacheOnDisk.
    In the case of GAE it will make use of gluon.contrib.gae_memcache.

    - self.ram is an instance of CacheInRam
    - self.disk is an instance of CacheOnDisk
    """

    def __init__(self, request):
        """
        Parameters
        ----------
        request:
            the global request object
        """
        # on Google App Engine both the ram and the disk caches are backed
        # by the GAE memcache service
        import settings
        if settings.global_settings.web2py_runtime_gae:
            from contrib.gae_memcache import MemcacheClient
            self.ram = self.disk = MemcacheClient(request)
        else:
            self.ram = CacheInRam(request)
            try:
                self.disk = CacheOnDisk(request)
            except IOError:
                logger.warning('no cache.disk (IOError)')
            except AttributeError:
                # raised when the request object has no folder attribute
                # (for instance outside of a normal request environment)
                logger.warning('no cache.disk (AttributeError)')

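    # Once constructed (web2py does this per application), the two backends
    # are used interchangeably, e.g. (sketch; `request` is the global request
    # object assumed by this example):
    #
    #     cache = Cache(request)
    #     t = cache.ram('t', lambda: time.ctime(), time_expire=5)
    #     d = cache.disk('d', lambda: time.ctime(), time_expire=3600)
    #     cache.ram.clear(regex='t')   # selectively drop RAM entries
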
    def __call__(self,
                 key=None,
                 time_expire=DEFAULT_TIME_EXPIRE,
                 cache_model=None):
        """
        Decorator function that can be used to cache any function/method.

        Example::

            @cache('key', 5000, cache.ram)
            def f():
                return time.ctime()

        When the function f is called, web2py tries to retrieve
        the value corresponding to `key` from the cache if the
        object exists and if it has not expired, otherwise it calls the
        function `f` and stores the output in the cache corresponding to
        `key`. In either case the output of the function is returned.

        :param key: the key of the object to be stored or retrieved
        :param time_expire: expiration of the cache in seconds
        :param cache_model: `cache.ram`, `cache.disk`, or other
            (like `cache.memcache` if defined). It defaults to `cache.ram`.

        Notes
        -----
        `time_expire` is used to compare the current time with the time when
        the requested object was last saved in cache. It does not affect
        future requests.
        Setting `time_expire` to 0 or to a negative value forces the cache to
        refresh.

        If the function `f` is an action, we suggest using
        `request.env.path_info` as key.
        """
        if not cache_model:
            cache_model = self.ram

        def tmp(func):
            def action():
                return cache_model(key, func, time_expire)
            action.__name__ = func.__name__
            action.__doc__ = func.__doc__
            return action

        return tmp
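
# As suggested above, caching a whole controller action can use the request
# path as the key (sketch; `cache` and `request` are web2py's usual globals
# and are assumptions of this example):
#
#     @cache(request.env.path_info, time_expire=300, cache_model=cache.ram)
#     def index():
#         return dict(message=time.ctime())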