"File-based cache backend"

import os
import time
try:
    import cPickle as pickle
except ImportError:
    import pickle

from django.core.cache.backends.base import BaseCache
from django.utils.hashcompat import md5_constructor

class CacheClass(BaseCache):
    def __init__(self, dir, params):
        BaseCache.__init__(self, params)

        max_entries = params.get('max_entries', 300)
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        cull_frequency = params.get('cull_frequency', 3)
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

        self._dir = dir
        if not os.path.exists(self._dir):
            self._createdir()

    def add(self, key, value, timeout=None):
        if self.has_key(key):
            return False

        self.set(key, value, timeout)
        return True

    def get(self, key, default=None):
        fname = self._key_to_file(key)
        try:
            f = open(fname, 'rb')
            # Each cache file stores two pickles: the expiry timestamp,
            # then the value itself.
            exp = pickle.load(f)
            now = time.time()
            if exp < now:
                # Expired -- remove the stale file and fall through to
                # the default.
                f.close()
                self._delete(fname)
            else:
                return pickle.load(f)
        except (IOError, OSError, EOFError, pickle.PickleError):
            pass
        return default

    def set(self, key, value, timeout=None):
        fname = self._key_to_file(key)
        dirname = os.path.dirname(fname)

        if timeout is None:
            timeout = self.default_timeout

        self._cull()

        try:
            if not os.path.exists(dirname):
                os.makedirs(dirname)

            # Write the expiry timestamp first, then the pickled value, so
            # that get() can check for expiry without unpickling the value.
            f = open(fname, 'wb')
            now = time.time()
            pickle.dump(now + timeout, f, pickle.HIGHEST_PROTOCOL)
            pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
        except (IOError, OSError):
            pass

    def delete(self, key):
        try:
            self._delete(self._key_to_file(key))
        except (IOError, OSError):
            pass

    def _delete(self, fname):
        os.remove(fname)
        try:
            # Remove the two hash-prefix subdirectories if they're now empty;
            # os.rmdir() only succeeds on empty directories.
            dirname = os.path.dirname(fname)
            os.rmdir(dirname)
            os.rmdir(os.path.dirname(dirname))
        except (IOError, OSError):
            pass

    def has_key(self, key):
        fname = self._key_to_file(key)
        try:
            f = open(fname, 'rb')
            exp = pickle.load(f)
            now = time.time()
            if exp < now:
                # Expired entries are deleted lazily, on access.
                f.close()
                self._delete(fname)
                return False
            else:
                return True
        except (IOError, OSError, EOFError, pickle.PickleError):
            return False

    def _cull(self):
        if int(self._num_entries) < self._max_entries:
            return

        try:
            filelist = os.listdir(self._dir)
        except (IOError, OSError):
            return

        if self._cull_frequency == 0:
            # A cull_frequency of 0 means "empty the whole cache".
            doomed = [os.path.join(self._dir, k) for k in filelist]
        else:
            # Otherwise delete roughly 1/cull_frequency of the entries.
            doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]

        for topdir in doomed:
            try:
                for root, _, files in os.walk(topdir):
                    for f in files:
                        self._delete(os.path.join(root, f))
            except (IOError, OSError):
                pass

    def _createdir(self):
        try:
            os.makedirs(self._dir)
        except OSError:
            raise EnvironmentError("Cache directory '%s' does not exist and could not be created" % self._dir)

    def _key_to_file(self, key):
        """
        Convert the cache key into an md5 hex string. The first few
        characters of that string become directory prefixes, to be nice to
        filesystems that have problems with large numbers of files in a
        single directory.

        Thus, a cache key of "foo" gets turned into a file named
        ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
        """
        path = md5_constructor(key.encode('utf-8')).hexdigest()
        path = os.path.join(path[:2], path[2:4], path[4:])
        return os.path.join(self._dir, path)

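    # Illustration only (not in the original source; it assumes md5_constructor
    # resolves to hashlib.md5, as it does whenever hashlib is available):
    #
    #     >>> import hashlib
    #     >>> hashlib.md5('foo'.encode('utf-8')).hexdigest()
    #     'acbd18db4cc2f85cedef654fccc4a4d8'
    #
    # The first two pairs of hex digits become the nested directories "ac/bd"
    # and the remainder becomes the file name.
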
    def _get_num_entries(self):
        count = 0
        for _, _, files in os.walk(self._dir):
            count += len(files)
        return count
    _num_entries = property(_get_num_entries)
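

# A minimal usage sketch, not part of the original backend: it assumes this
# module's Django imports are available and that "/tmp/django_file_cache" is a
# writable path. In a real project this backend is normally selected through
# settings (for this era of Django, something like
# CACHE_BACKEND = 'file:///var/tmp/django_cache') rather than instantiated
# directly.
if __name__ == "__main__":
    cache = CacheClass("/tmp/django_file_cache",
                       {"max_entries": 100, "cull_frequency": 4})
    cache.set("greeting", {"message": "hello"}, timeout=30)
    print(cache.get("greeting"))     # {'message': 'hello'}
    print(cache.has_key("missing"))  # False
    cache.delete("greeting")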