2 # Copyright 2009-2010 Joshua Roesslein
3 # See LICENSE for details.
5 from __future__ import print_function
13 import cPickle as pickle
26 # Probably on a windows system
def __init__(self, timeout=60):
    """Initialize the cache

    timeout: number of seconds to keep a cached entry before it is
        considered expired (subclasses treat timeout <= 0 as "never
        expire" — see MemoryCache._is_expired)
    """
    self.timeout = timeout
def store(self, key, value):
    """Add new record to cache

    key: entry key
    value: data of entry

    Abstract: subclasses must override.
    """
    raise NotImplementedError
def get(self, key, timeout=None):
    """Get cached entry if exists and not expired

    key: which entry to get
    timeout: override timeout with this value [optional]

    Abstract: subclasses must override.
    """
    raise NotImplementedError
def count(self):
    """Get count of entries currently stored in cache"""
    raise NotImplementedError
def cleanup(self):
    """Delete any expired entries in cache."""
    raise NotImplementedError
def flush(self):
    """Delete all cached entries"""
    raise NotImplementedError
class MemoryCache(Cache):
    """In-memory cache.

    Entries are stored in a dict as (insert_time, value) tuples; a
    threading.Lock guards mutations so the cache can be shared between
    threads. The lock itself is excluded from pickling.
    """

    def __init__(self, timeout=60):
        Cache.__init__(self, timeout)
        self._entries = {}
        self.lock = threading.Lock()

    def __getstate__(self):
        # pickle: the lock is not picklable, persist only the data
        return {'entries': self._entries, 'timeout': self.timeout}

    def __setstate__(self, state):
        # unpickle: recreate a fresh lock alongside the restored data
        self.lock = threading.Lock()
        self._entries = state['entries']
        self.timeout = state['timeout']

    def _is_expired(self, entry, timeout):
        # timeout <= 0 means entries never expire
        return timeout > 0 and (time.time() - entry[0]) >= timeout

    def store(self, key, value):
        with self.lock:
            self._entries[key] = (time.time(), value)

    def get(self, key, timeout=None):
        with self.lock:
            # check to see if we have this key
            entry = self._entries.get(key)
            if not entry:
                # no hit, return nothing
                return None

            # use provided timeout in arguments if provided
            # otherwise use the one provided during init.
            if timeout is None:
                timeout = self.timeout

            # make sure entry is not expired
            if self._is_expired(entry, timeout):
                # entry expired, delete and return nothing
                del self._entries[key]
                return None

            # entry found and not expired, return it
            return entry[1]

    def count(self):
        return len(self._entries)

    def cleanup(self):
        with self.lock:
            # iterate over a copy so we can delete while scanning
            for k, v in dict(self._entries).items():
                if self._is_expired(v, self.timeout):
                    del self._entries[k]

    def flush(self):
        with self.lock:
            self._entries.clear()
class FileCache(Cache):
    """File-based cache.

    Each entry lives in its own pickle file under cache_dir, named by
    the MD5 digest of the key. A per-directory threading.Lock plus a
    platform file lock make access safe across threads and (on posix)
    processes.
    """

    # locks used to make cache thread-safe
    cache_locks = {}

    def __init__(self, cache_dir, timeout=60):
        Cache.__init__(self, timeout)
        if os.path.exists(cache_dir) is False:
            os.mkdir(cache_dir)
        self.cache_dir = cache_dir
        # share one lock per directory so multiple FileCache instances
        # pointing at the same directory serialize with each other
        if cache_dir in FileCache.cache_locks:
            self.lock = FileCache.cache_locks[cache_dir]
        else:
            self.lock = threading.Lock()
            FileCache.cache_locks[cache_dir] = self.lock

        # pick the file-locking strategy for this platform
        if os.name == 'posix':
            self._lock_file = self._lock_file_posix
            self._unlock_file = self._unlock_file_posix
        elif os.name == 'nt':
            self._lock_file = self._lock_file_win32
            self._unlock_file = self._unlock_file_win32
        else:
            print('Warning! FileCache locking not supported on this system!')
            self._lock_file = self._lock_file_dummy
            self._unlock_file = self._unlock_file_dummy

    def _get_path(self, key):
        """Map a key to its on-disk path via an MD5 digest."""
        md5 = hashlib.md5()
        md5.update(key.encode('utf-8'))
        return os.path.join(self.cache_dir, md5.hexdigest())

    def _lock_file_dummy(self, path, exclusive=True):
        return None

    def _unlock_file_dummy(self, lock):
        return

    def _lock_file_posix(self, path, exclusive=True):
        lock_path = path + '.lock'
        if exclusive is True:
            f_lock = open(lock_path, 'w')
            fcntl.lockf(f_lock, fcntl.LOCK_EX)
        else:
            f_lock = open(lock_path, 'r')
            fcntl.lockf(f_lock, fcntl.LOCK_SH)
        if os.path.exists(lock_path) is False:
            # lock file vanished between open and lockf (presumably
            # removed by another process) — treat as lock failure
            f_lock.close()
            return None
        return f_lock

    def _unlock_file_posix(self, lock):
        # closing the descriptor releases the fcntl lock
        lock.close()

    def _lock_file_win32(self, path, exclusive=True):
        # TODO: implement win32 locking
        return None

    def _unlock_file_win32(self, lock):
        # TODO: implement win32 unlocking
        return

    def _delete_file(self, path):
        os.remove(path)
        # remove the companion lock file too, if any
        if os.path.exists(path + '.lock'):
            os.remove(path + '.lock')

    def store(self, key, value):
        path = self._get_path(key)
        with self.lock:
            # acquire file lock and write the timestamped pickle;
            # 'with' guarantees the data file is closed
            f_lock = self._lock_file(path)
            with open(path, 'wb') as datafile:
                pickle.dump((time.time(), value), datafile)
            self._unlock_file(f_lock)

    def get(self, key, timeout=None):
        return self._get(self._get_path(key), timeout)

    def _get(self, path, timeout):
        if os.path.exists(path) is False:
            # no record
            return None
        with self.lock:
            # acquire shared lock and read pickled object
            f_lock = self._lock_file(path, False)
            with open(path, 'rb') as datafile:
                created_time, value = pickle.load(datafile)

            # check if value is expired; timeout <= 0 disables expiry
            # (matching MemoryCache._is_expired semantics)
            if timeout is None:
                timeout = self.timeout
            if timeout > 0 and (time.time() - created_time) >= timeout:
                # expired! delete from cache
                value = None
                self._delete_file(path)

            # unlock and return result
            self._unlock_file(f_lock)
            return value

    def count(self):
        # '.lock' files are bookkeeping, not entries
        return sum(1 for entry in os.listdir(self.cache_dir)
                   if not entry.endswith('.lock'))

    def cleanup(self):
        # _get() deletes an expired entry as a side effect
        for entry in os.listdir(self.cache_dir):
            if entry.endswith('.lock'):
                continue
            self._get(os.path.join(self.cache_dir, entry), None)

    def flush(self):
        for entry in os.listdir(self.cache_dir):
            if entry.endswith('.lock'):
                continue
            self._delete_file(os.path.join(self.cache_dir, entry))
class MemCacheCache(Cache):
    """Cache interface backed by a memcache client.

    Expiration is delegated to the memcache server via the per-entry
    time set in store(); count/cleanup/flush are unsupported.
    """

    def __init__(self, client, timeout=60):
        """Initialize the cache
        client: The memcache client
        timeout: number of seconds to keep a cached entry
        """
        self.client = client
        self.timeout = timeout

    def store(self, key, value):
        """Add new record to cache
        key: entry key
        value: data of entry
        """
        self.client.set(key, value, time=self.timeout)

    def get(self, key, timeout=None):
        """Get cached entry if exists and not expired
        key: which entry to get
        timeout: override timeout with this value [optional].
        The override has no effect here since expiration is handled
        server-side using the timeout given at store() time.
        """
        return self.client.get(key)

    def count(self):
        """Get count of entries currently stored in cache. RETURN 0"""
        raise NotImplementedError

    def cleanup(self):
        """Delete any expired entries in cache. NO-OP"""
        raise NotImplementedError

    def flush(self):
        """Delete all cached entries. NO-OP"""
        raise NotImplementedError
class RedisCache(Cache):
    """Cache running in a redis server"""

    def __init__(self, client,
                 timeout=60,
                 keys_container='tweepy:keys',
                 pre_identifier='tweepy:'):
        Cache.__init__(self, timeout)
        self.client = client
        # set holding every cache key, so count/cleanup/flush can
        # enumerate entries
        self.keys_container = keys_container
        # prefix prepended to every key to namespace tweepy entries
        self.pre_identifier = pre_identifier

    def _is_expired(self, entry, timeout):
        # Returns true if the entry has expired; timeout <= 0 means
        # entries never expire
        return timeout > 0 and (time.time() - entry[0]) >= timeout

    def store(self, key, value):
        """Store the key, value pair in our redis server"""
        # Prepend tweepy to our key,
        # this makes it easier to identify tweepy keys in our redis server
        key = self.pre_identifier + key
        # Get a pipe (to execute several redis commands in one step)
        pipe = self.client.pipeline()
        # Set our values in a redis hash (similar to python dict)
        pipe.set(key, pickle.dumps((time.time(), value)))
        # Set the expiration server-side as well
        pipe.expire(key, self.timeout)
        # Add the key to a set containing all the keys
        pipe.sadd(self.keys_container, key)
        # Execute the instructions in the redis server
        pipe.execute()

    def get(self, key, timeout=None):
        """Given a key, returns an element from the redis table"""
        key = self.pre_identifier + key
        # Check to see if we have this key
        unpickled_entry = self.client.get(key)
        if not unpickled_entry:
            # No hit, return nothing
            return None

        entry = pickle.loads(unpickled_entry)
        # Use provided timeout in arguments if provided
        # otherwise use the one provided during init.
        if timeout is None:
            timeout = self.timeout

        # Make sure entry is not expired
        if self._is_expired(entry, timeout):
            # entry expired, delete and return nothing
            self.delete_entry(key)
            return None
        # entry found and not expired, return it
        return entry[1]

    def count(self):
        """Note: This is not very efficient,
        since it retrieves all the keys from the redis
        server to know how many keys we have"""
        return len(self.client.smembers(self.keys_container))

    def delete_entry(self, key):
        """Delete an object from the redis table"""
        pipe = self.client.pipeline()
        pipe.srem(self.keys_container, key)
        pipe.delete(key)
        pipe.execute()

    def cleanup(self):
        """Cleanup all the expired keys"""
        keys = self.client.smembers(self.keys_container)
        for key in keys:
            entry = self.client.get(key)
            if entry:
                entry = pickle.loads(entry)
                if self._is_expired(entry, self.timeout):
                    self.delete_entry(key)

    def flush(self):
        """Delete all entries from the cache"""
        keys = self.client.smembers(self.keys_container)
        for key in keys:
            self.delete_entry(key)
class MongodbCache(Cache):
    """A simple pickle-based MongoDB cache system."""

    def __init__(self, db, timeout=3600, collection='tweepy_cache'):
        """Should receive a "database" cursor from pymongo."""
        Cache.__init__(self, timeout)
        self.timeout = timeout
        self.col = db[collection]
        # TTL index: the server expires documents automatically
        self.col.create_index('created', expireAfterSeconds=timeout)

    def store(self, key, value):
        from bson.binary import Binary

        now = datetime.datetime.utcnow()
        blob = Binary(pickle.dumps(value))

        self.col.insert({'created': now, '_id': key, 'value': blob})

    def get(self, key, timeout=None):
        if timeout:
            # per-call timeout overrides are not supported; expiry is
            # handled server-side by the TTL index
            raise NotImplementedError
        obj = self.col.find_one({'_id': key})
        if obj:
            return pickle.loads(obj['value'])

    def count(self):
        return self.col.find({}).count()

    def delete_entry(self, key):
        return self.col.remove({'_id': key})

    def cleanup(self):
        """MongoDB will automatically clear expired keys."""
        pass

    def flush(self):
        self.col.drop()
        # recreate the TTL index on the fresh collection
        self.col.create_index('created', expireAfterSeconds=self.timeout)