# Authors: see git history
#
# Copyright (c) 2010 Authors
# Licensed under the GNU GPL version 3.0 or later. See the file LICENSE for details.

import os
import atexit
import hashlib
import pickle

import appdirs
import diskcache

try:
    from functools import lru_cache
except ImportError:
    from backports.functools_lru_cache import lru_cache


# simplify use of lru_cache decorator
def cache(*args, **kwargs):
    return lru_cache(maxsize=None)(*args, **kwargs)
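
# Minimal usage sketch: cache() behaves like functools.lru_cache(maxsize=None),
# so decorating a function memoizes it on its hashable arguments.  The
# decorated function and lookup table below are hypothetical.
#
#     @cache
#     def convert_unit(value, unit):
#         return value * UNIT_FACTORS[unit]
#
# Repeated calls with the same arguments return the memoized result instead
# of recomputing it.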


__stitch_plan_cache = None


def get_stitch_plan_cache():
    global __stitch_plan_cache

    if __stitch_plan_cache is None:
        cache_dir = os.path.join(appdirs.user_config_dir('inkstitch'), 'cache', 'stitch_plan')
        # diskcache takes the maximum cache size (here 100 MB) as size_limit
        __stitch_plan_cache = diskcache.Cache(cache_dir, size_limit=1024 * 1024 * 100)
        atexit.register(__stitch_plan_cache.close)

    return __stitch_plan_cache
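
# Minimal usage sketch for the disk-backed cache, assuming a caller that has
# already rendered a stitch plan; the variable and function names below are
# hypothetical.  diskcache.Cache supports dict-style access, so a generated
# key can be used to store and retrieve results across runs:
#
#     stitch_plan_cache = get_stitch_plan_cache()
#     key_generator = CacheKeyGenerator()
#     key_generator.update(svg_document_bytes)
#     key = key_generator.get_cache_key()
#     if key not in stitch_plan_cache:
#         stitch_plan_cache[key] = compute_stitch_plan(svg_document_bytes)
#     stitch_plan = stitch_plan_cache[key]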


class CacheKeyGenerator(object):
    """Generate cache keys given arbitrary data.

    Given arbitrary data, generate a short cache key that is extremely
    likely to be unique.

    Usage example:
        >>> generator = CacheKeyGenerator()
        >>> generator.update(b'12345')
        >>> generator.update([1, 2, 3, {4, 5, 6}])
        >>> generator.get_cache_key()
    """

    def __init__(self):
        # SHA1 is chosen for speed.  We don't need cryptography-grade hashing
        # for this use case.
        self._hasher = hashlib.sha1()

    def update(self, data):
        """Provide data to be hashed into a cache key.

        Arguments:
            data -- a bytes object or any object that can be pickled
        """
        if not isinstance(data, bytes):
            data = pickle.dumps(data)

        self._hasher.update(data)

    def get_cache_key(self):
        return self._hasher.hexdigest()