path: root/lib/utils/cache.py
blob: 6d51ea08e3ba6df850e2a4aad24e36f270dda066
# Authors: see git history
#
# Copyright (c) 2010 Authors
# Licensed under the GNU GPL version 3.0 or later.  See the file LICENSE for details.
import atexit
import hashlib
import os
import pickle
import sqlite3

import appdirs
import diskcache

from lib.utils.settings import global_settings

try:
    from functools import lru_cache
except ImportError:
    from backports.functools_lru_cache import lru_cache


# simplify use of the lru_cache decorator: @cache is equivalent to @lru_cache(maxsize=None)
def cache(*args, **kwargs):
    return lru_cache(maxsize=None)(*args, **kwargs)
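

# A minimal usage sketch (hypothetical function name, not part of this module):
# decorating a function with @cache memoizes it with an unbounded lru_cache, so
# repeated calls with the same arguments reuse the first computed result.
#
#     @cache
#     def slow_lookup(name):
#         ...  # expensive work runs only once per distinct name
#
#     slow_lookup('satin')       # computed
#     slow_lookup('satin')       # served from the in-memory cache
#     slow_lookup.cache_info()   # standard lru_cache statistics (hits, misses, currsize)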


__stitch_plan_cache = None


def get_stitch_plan_cache():
    global __stitch_plan_cache

    if __stitch_plan_cache is None:
        cache_dir = os.path.join(appdirs.user_config_dir('inkstitch'), 'cache', 'stitch_plan')
        size_limit = global_settings['cache_size'] * 1024 * 1024
        try:
            __stitch_plan_cache = diskcache.Cache(cache_dir, size_limit=size_limit)
        except sqlite3.DatabaseError:
            # the cache database couldn't be read; delete it and start over with a fresh cache
            cache_file = os.path.join(cache_dir, 'cache.db')
            if os.path.exists(cache_file):
                os.remove(cache_file)
            __stitch_plan_cache = diskcache.Cache(cache_dir, size_limit=size_limit)

        # clear the cache if the integrity check reports warnings
        warnings = __stitch_plan_cache.check()
        if warnings:
            __stitch_plan_cache.clear()

        atexit.register(__stitch_plan_cache.close)
    return __stitch_plan_cache


def is_cache_disabled():
    return not global_settings['cache_size']
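

# A minimal sketch of the intended caller pattern (compute_stitch_plan and
# cache_key are hypothetical names; real callers live elsewhere in Ink/Stitch):
# a cache size of 0 disables disk caching entirely, otherwise look the stitch
# plan up by key and store it on a miss.
#
#     if is_cache_disabled():
#         stitch_plan = compute_stitch_plan()
#     else:
#         stitch_plan_cache = get_stitch_plan_cache()
#         stitch_plan = stitch_plan_cache.get(cache_key)
#         if stitch_plan is None:
#             stitch_plan = compute_stitch_plan()
#             stitch_plan_cache[cache_key] = stitch_plan  # diskcache pickles the value to disk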


class CacheKeyGenerator(object):
    """Generate cache keys given arbitrary data.

    Given arbitrary data, generate a short cache key that is extremely likely
    to be unique.

    Example usage:

        >>> generator = CacheKeyGenerator()
        >>> generator.update(b'12345')
        >>> generator.update([1, 2, 3, {4, 5, 6}])
        >>> generator.get_cache_key()
    """

    def __init__(self):
        # SHA1 is chosen for speed.  We don't need cryptography-grade hashing
        # for this use case.
        self._hasher = hashlib.sha1()

    def update(self, data):
        """Provide data to be hashed into a cache key.

        Arguments:
            data -- a bytes object or any object that can be pickled
        """

        if not isinstance(data, bytes):
            data = pickle.dumps(data)

        self._hasher.update(data)

    def get_cache_key(self):
        return self._hasher.hexdigest()
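

# A short illustration of the keys this class produces (the digest shown is just
# the SHA-1 of the bytes b'12345'): get_cache_key() returns a 40-character
# hexadecimal SHA-1 digest of everything passed to update().  Non-bytes data is
# pickled first, so equal objects hash equally within one Python/pickle version,
# but keys are not guaranteed to be stable across versions.
#
#     generator = CacheKeyGenerator()
#     generator.update(b'12345')
#     generator.get_cache_key()  # '8cb2237d0679ca88db6464eac60da96345513964'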