gvsig-scripting / org.gvsig.scripting / trunk / org.gvsig.scripting / org.gvsig.scripting.app / org.gvsig.scripting.app.mainplugin / src / main / resources-plugin / scripting / lib / dulwich / tests / test_lru_cache.py @ 959
History | View | Annotate | Download (15.8 KB)
# Copyright (C) 2006, 2008 Canonical Ltd
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
19 |
|
20 |
"""Tests for the lru_cache module."""
|
21 |
|
22 |
from dulwich import ( |
23 |
lru_cache, |
24 |
) |
25 |
from dulwich.tests import ( |
26 |
TestCase, |
27 |
) |
28 |
|
29 |
class TestLRUCache(TestCase):
    """Test that LRU cache properly keeps track of entries."""

    def test_cache_size(self):
        """cache_size() reflects the configured and resized capacity."""
        cache = lru_cache.LRUCache(max_cache=10)
        self.assertEqual(10, cache.cache_size())

        cache = lru_cache.LRUCache(max_cache=256)
        self.assertEqual(256, cache.cache_size())

        cache.resize(512)
        self.assertEqual(512, cache.cache_size())

    def test_missing(self):
        """Absent keys raise KeyError; present keys are found."""
        cache = lru_cache.LRUCache(max_cache=10)

        self.assertNotIn('foo', cache)
        self.assertRaises(KeyError, cache.__getitem__, 'foo')

        cache['foo'] = 'bar'
        self.assertEqual('bar', cache['foo'])
        self.assertIn('foo', cache)
        self.assertNotIn('bar', cache)

    def test_map_None(self):
        """None is a valid key, even as the LRU or MRU entry."""
        # Make sure that we can properly map None as a key.
        cache = lru_cache.LRUCache(max_cache=10)
        self.assertNotIn(None, cache)
        cache[None] = 1
        self.assertEqual(1, cache[None])
        cache[None] = 2
        self.assertEqual(2, cache[None])
        # Test the various code paths of __getitem__, to make sure that we can
        # handle when None is the key for the LRU and the MRU
        cache[1] = 3
        cache[None] = 1
        cache[None]
        cache[1]
        cache[None]
        self.assertEqual([None, 1], [n.key for n in cache._walk_lru()])

    def test_add__null_key(self):
        """The internal _null_key sentinel is rejected as a user key."""
        cache = lru_cache.LRUCache(max_cache=10)
        self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)

    def test_overflow(self):
        """Adding extra entries will pop out old ones."""
        cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1)

        cache['foo'] = 'bar'
        # With a max cache of 1, adding 'baz' should pop out 'foo'
        cache['baz'] = 'biz'

        self.assertNotIn('foo', cache)
        self.assertIn('baz', cache)

        self.assertEqual('biz', cache['baz'])

    def test_by_usage(self):
        """Accessing entries bumps them up in priority."""
        cache = lru_cache.LRUCache(max_cache=2)

        cache['baz'] = 'biz'
        cache['foo'] = 'bar'

        self.assertEqual('biz', cache['baz'])

        # This must kick out 'foo' because it was the last accessed
        cache['nub'] = 'in'

        self.assertNotIn('foo', cache)

    def test_cleanup(self):
        """Test that we can use a cleanup function."""
        cleanup_called = []

        def cleanup_func(key, val):
            cleanup_called.append((key, val))

        cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2)

        cache.add('baz', '1', cleanup=cleanup_func)
        cache.add('foo', '2', cleanup=cleanup_func)
        cache.add('biz', '3', cleanup=cleanup_func)

        self.assertEqual([('baz', '1')], cleanup_called)

        # 'foo' is now most recent, so final cleanup will call it last
        cache['foo']
        cache.clear()
        self.assertEqual([('baz', '1'), ('biz', '3'), ('foo', '2')],
                         cleanup_called)

    def test_cleanup_on_replace(self):
        """Replacing an object should cleanup the old value."""
        cleanup_called = []

        def cleanup_func(key, val):
            cleanup_called.append((key, val))

        cache = lru_cache.LRUCache(max_cache=2)
        cache.add(1, 10, cleanup=cleanup_func)
        cache.add(2, 20, cleanup=cleanup_func)
        cache.add(2, 25, cleanup=cleanup_func)

        self.assertEqual([(2, 20)], cleanup_called)
        self.assertEqual(25, cache[2])

        # Even __setitem__ should make sure cleanup() is called
        cache[2] = 26
        self.assertEqual([(2, 20), (2, 25)], cleanup_called)

    def test_len(self):
        """len() tracks insertions, replacements, and the max_cache cap."""
        cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)

        cache[1] = 10
        cache[2] = 20
        cache[3] = 30
        cache[4] = 40

        self.assertEqual(4, len(cache))

        cache[5] = 50
        cache[6] = 60
        cache[7] = 70
        cache[8] = 80

        self.assertEqual(8, len(cache))

        cache[1] = 15  # replacement

        self.assertEqual(8, len(cache))

        cache[9] = 90
        cache[10] = 100
        cache[11] = 110

        # We hit the max
        self.assertEqual(10, len(cache))
        self.assertEqual([11, 10, 9, 1, 8, 7, 6, 5, 4, 3],
                         [n.key for n in cache._walk_lru()])

    def test_cleanup_shrinks_to_after_clean_count(self):
        """Overflow triggers a cleanup down to after_cleanup_count entries."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3)

        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual(5, len(cache))
        # This will bump us over the max, which causes us to shrink down to
        # after_cleanup_cache size
        cache.add(6, 40)
        self.assertEqual(3, len(cache))

    def test_after_cleanup_larger_than_max(self):
        """after_cleanup_count is clamped to max_cache."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10)
        self.assertEqual(5, cache._after_cleanup_count)

    def test_after_cleanup_none(self):
        """Passing None uses the default 80% of max_cache."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None)
        # By default _after_cleanup_size is 80% of the normal size
        self.assertEqual(4, cache._after_cleanup_count)

    def test_cleanup_2(self):
        """An explicit cleanup() shrinks to after_cleanup_count."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2)

        # Add these in order
        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual(5, len(cache))
        # Force a compaction
        cache.cleanup()
        self.assertEqual(2, len(cache))

    def test_preserve_last_access_order(self):
        """_walk_lru() yields entries in most-recently-used-first order."""
        cache = lru_cache.LRUCache(max_cache=5)

        # Add these in order
        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual([5, 4, 3, 2, 1], [n.key for n in cache._walk_lru()])

        # Now access some randomly
        cache[2]
        cache[5]
        cache[3]
        cache[2]
        self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()])

    def test_get(self):
        """get() returns the value or default and bumps recency on a hit."""
        cache = lru_cache.LRUCache(max_cache=5)

        cache.add(1, 10)
        cache.add(2, 20)
        self.assertEqual(20, cache.get(2))
        self.assertIsNone(cache.get(3))
        obj = object()
        self.assertIs(obj, cache.get(3, obj))
        self.assertEqual([2, 1], [n.key for n in cache._walk_lru()])
        self.assertEqual(10, cache.get(1))
        self.assertEqual([1, 2], [n.key for n in cache._walk_lru()])

    def test_keys(self):
        """keys() reflects only the entries still cached after overflow."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5)

        cache[1] = 2
        cache[2] = 3
        cache[3] = 4
        self.assertEqual([1, 2, 3], sorted(cache.keys()))
        cache[4] = 5
        cache[5] = 6
        cache[6] = 7
        self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))

    def test_resize_smaller(self):
        """Shrinking via resize() evicts immediately and caps future adds."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
        cache[1] = 2
        cache[2] = 3
        cache[3] = 4
        cache[4] = 5
        cache[5] = 6
        self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
        cache[6] = 7
        self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
        # Now resize to something smaller, which triggers a cleanup
        cache.resize(max_cache=3, after_cleanup_count=2)
        self.assertEqual([5, 6], sorted(cache.keys()))
        # Adding something will use the new size
        cache[7] = 8
        self.assertEqual([5, 6, 7], sorted(cache.keys()))
        cache[8] = 9
        self.assertEqual([7, 8], sorted(cache.keys()))

    def test_resize_larger(self):
        """Growing via resize() keeps entries and raises the eviction point."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
        cache[1] = 2
        cache[2] = 3
        cache[3] = 4
        cache[4] = 5
        cache[5] = 6
        self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
        cache[6] = 7
        self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
        cache.resize(max_cache=8, after_cleanup_count=6)
        self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
        cache[7] = 8
        cache[8] = 9
        cache[9] = 10
        cache[10] = 11
        self.assertEqual([3, 4, 5, 6, 7, 8, 9, 10], sorted(cache.keys()))
        cache[11] = 12  # triggers cleanup back to new after_cleanup_count
        self.assertEqual([6, 7, 8, 9, 10, 11], sorted(cache.keys()))
292 |
class TestLRUSizeCache(TestCase):
    """Test the byte-size-bounded variant of the LRU cache."""

    def test_basic_init(self):
        """Default construction: 2048 entry cap, 80% cleanup threshold."""
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(2048, cache._max_cache)
        self.assertEqual(int(cache._max_size * 0.8), cache._after_cleanup_size)
        self.assertEqual(0, cache._value_size)

    def test_add__null_key(self):
        """The internal _null_key sentinel is rejected as a user key."""
        cache = lru_cache.LRUSizeCache()
        self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)

    def test_add_tracks_size(self):
        """Adding a value adds its size to _value_size."""
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(0, cache._value_size)
        cache.add('my key', 'my value text')
        self.assertEqual(13, cache._value_size)

    def test_remove_tracks_size(self):
        """Removing a node subtracts its size from _value_size."""
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(0, cache._value_size)
        cache.add('my key', 'my value text')
        self.assertEqual(13, cache._value_size)
        node = cache._cache['my key']
        cache._remove_node(node)
        self.assertEqual(0, cache._value_size)

    def test_no_add_over_size(self):
        """Adding a large value may not be cached at all."""
        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
        self.assertEqual(0, cache._value_size)
        self.assertEqual({}, cache.items())
        cache.add('test', 'key')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test': 'key'}, cache.items())
        cache.add('test2', 'key that is too big')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test': 'key'}, cache.items())
        # If we would add a key, only to cleanup and remove all cached entries,
        # then obviously that value should not be stored
        cache.add('test3', 'bigkey')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test': 'key'}, cache.items())

        cache.add('test4', 'bikey')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test': 'key'}, cache.items())

    def test_no_add_over_size_cleanup(self):
        """If a large value is not cached, we will call cleanup right away."""
        cleanup_calls = []

        def cleanup(key, value):
            cleanup_calls.append((key, value))

        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
        self.assertEqual(0, cache._value_size)
        self.assertEqual({}, cache.items())
        cache.add('test', 'key that is too big', cleanup=cleanup)
        # key was not added
        self.assertEqual(0, cache._value_size)
        self.assertEqual({}, cache.items())
        # and cleanup was called
        self.assertEqual([('test', 'key that is too big')], cleanup_calls)

    def test_adding_clears_cache_based_on_size(self):
        """The cache is cleared in LRU order until small enough"""
        cache = lru_cache.LRUSizeCache(max_size=20)
        cache.add('key1', 'value')  # 5 chars
        cache.add('key2', 'value2')  # 6 chars
        cache.add('key3', 'value23')  # 7 chars
        self.assertEqual(5 + 6 + 7, cache._value_size)
        cache['key2']  # reference key2 so it gets a newer reference time
        cache.add('key4', 'value234')  # 8 chars, over limit
        # We have to remove 2 keys to get back under limit
        self.assertEqual(6 + 8, cache._value_size)
        self.assertEqual({'key2': 'value2', 'key4': 'value234'},
                         cache.items())

    def test_adding_clears_to_after_cleanup_size(self):
        """Overflow shrinks the cache down to after_cleanup_size bytes."""
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
        cache.add('key1', 'value')  # 5 chars
        cache.add('key2', 'value2')  # 6 chars
        cache.add('key3', 'value23')  # 7 chars
        self.assertEqual(5 + 6 + 7, cache._value_size)
        cache['key2']  # reference key2 so it gets a newer reference time
        cache.add('key4', 'value234')  # 8 chars, over limit
        # We have to remove 3 keys to get back under limit
        self.assertEqual(8, cache._value_size)
        self.assertEqual({'key4': 'value234'}, cache.items())

    def test_custom_sizes(self):
        """A user-supplied compute_size function drives the accounting."""
        def size_of_list(lst):
            return sum(len(x) for x in lst)

        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10,
                                       compute_size=size_of_list)

        cache.add('key1', ['val', 'ue'])  # 5 chars
        cache.add('key2', ['val', 'ue2'])  # 6 chars
        cache.add('key3', ['val', 'ue23'])  # 7 chars
        self.assertEqual(5 + 6 + 7, cache._value_size)
        cache['key2']  # reference key2 so it gets a newer reference time
        cache.add('key4', ['value', '234'])  # 8 chars, over limit
        # We have to remove 3 keys to get back under limit
        self.assertEqual(8, cache._value_size)
        self.assertEqual({'key4': ['value', '234']}, cache.items())

    def test_cleanup(self):
        """An explicit cleanup() shrinks to after_cleanup_size."""
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)

        # Add these in order
        cache.add('key1', 'value')  # 5 chars
        cache.add('key2', 'value2')  # 6 chars
        cache.add('key3', 'value23')  # 7 chars
        self.assertEqual(5 + 6 + 7, cache._value_size)

        cache.cleanup()
        # Only the most recent fits after cleaning up
        self.assertEqual(7, cache._value_size)

    def test_keys(self):
        """keys() lists entries while total size stays within max_size."""
        cache = lru_cache.LRUSizeCache(max_size=10)

        cache[1] = 'a'
        cache[2] = 'b'
        cache[3] = 'cdef'
        self.assertEqual([1, 2, 3], sorted(cache.keys()))

    def test_resize_smaller(self):
        """Shrinking via resize() evicts immediately and caps future adds."""
        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
        cache[1] = 'abc'
        cache[2] = 'def'
        cache[3] = 'ghi'
        cache[4] = 'jkl'
        # Triggers a cleanup
        self.assertEqual([2, 3, 4], sorted(cache.keys()))
        # Resize should also cleanup again
        cache.resize(max_size=6, after_cleanup_size=4)
        self.assertEqual([4], sorted(cache.keys()))
        # Adding should use the new max size
        cache[5] = 'mno'
        self.assertEqual([4, 5], sorted(cache.keys()))
        cache[6] = 'pqr'
        self.assertEqual([6], sorted(cache.keys()))

    def test_resize_larger(self):
        """Growing via resize() keeps entries and raises the eviction point."""
        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
        cache[1] = 'abc'
        cache[2] = 'def'
        cache[3] = 'ghi'
        cache[4] = 'jkl'
        # Triggers a cleanup
        self.assertEqual([2, 3, 4], sorted(cache.keys()))
        cache.resize(max_size=15, after_cleanup_size=12)
        self.assertEqual([2, 3, 4], sorted(cache.keys()))
        cache[5] = 'mno'
        cache[6] = 'pqr'
        self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
        cache[7] = 'stu'
        self.assertEqual([4, 5, 6, 7], sorted(cache.keys()))