Skip to content

Commit 8126bdc

Browse files
authored
refactor: simplify using property line methods (#199)
* refactor: simplify using property line methods * public & just property
1 parent 4e9e033 commit 8126bdc

3 files changed

Lines changed: 42 additions & 38 deletions

File tree

cachier/cores/base.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import abc # for the _BaseCore abstract base class
1010
import inspect
1111
import threading
12+
from typing import Callable
1213

1314
from .._types import HashFunc
1415
from ..config import _update_with_defaults
@@ -18,6 +19,10 @@ class RecalculationNeeded(Exception):
1819
pass
1920

2021

22+
def _get_func_str(func: Callable) -> str:
    """Build the cache identifier string for *func*.

    The identifier is a dot-prefixed combination of the function's
    module and name, e.g. ``".package.module.func_name"``.
    """
    return ".{}.{}".format(func.__module__, func.__name__)
2126
class _BaseCore:
2227
__metaclass__ = abc.ABCMeta
2328

cachier/cores/mongo.py

Lines changed: 11 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
from pymongo import ASCENDING, IndexModel
2222
from pymongo.errors import OperationFailure
2323

24-
from .base import RecalculationNeeded, _BaseCore
24+
from .base import RecalculationNeeded, _BaseCore, _get_func_str
2525

2626
MONGO_SLEEP_DURATION_IN_SEC = 1
2727

@@ -60,13 +60,13 @@ def __init__(
6060
)
6161
self.mongo_collection.create_indexes([func1key1])
6262

63-
@staticmethod
64-
def _get_func_str(func):
65-
return f".{func.__module__}.{func.__name__}"
63+
@property
def _func_str(self) -> str:
    """Cache identifier string of the wrapped function.

    Delegates to the module-level ``_get_func_str`` helper so all cores
    share one identifier format.
    """
    identifier = _get_func_str(self.func)
    return identifier
6666

6767
def get_entry_by_key(self, key):
6868
res = self.mongo_collection.find_one(
69-
{"func": _MongoCore._get_func_str(self.func), "key": key}
69+
{"func": self._func_str, "key": key}
7070
)
7171
if not res:
7272
return key, None
@@ -89,10 +89,10 @@ def get_entry_by_key(self, key):
8989
def set_entry(self, key, func_res):
9090
thebytes = pickle.dumps(func_res)
9191
self.mongo_collection.update_one(
92-
filter={"func": _MongoCore._get_func_str(self.func), "key": key},
92+
filter={"func": self._func_str, "key": key},
9393
update={
9494
"$set": {
95-
"func": _MongoCore._get_func_str(self.func),
95+
"func": self._func_str,
9696
"key": key,
9797
"value": Binary(thebytes),
9898
"time": datetime.now(),
@@ -105,7 +105,7 @@ def set_entry(self, key, func_res):
105105

106106
def mark_entry_being_calculated(self, key):
    """Flag the cache entry stored under *key* as being computed.

    Upserts, so the marker document is created if it does not exist yet.
    """
    query = {"func": self._func_str, "key": key}
    change = {"$set": {"being_calculated": True}}
    self.mongo_collection.update_one(
        filter=query,
        update=change,
        upsert=True,
    )
@@ -114,7 +114,7 @@ def mark_entry_not_calculated(self, key):
114114
with suppress(OperationFailure): # don't care in this case
115115
self.mongo_collection.update_one(
116116
filter={
117-
"func": _MongoCore._get_func_str(self.func),
117+
"func": self._func_str,
118118
"key": key,
119119
},
120120
update={"$set": {"being_calculated": False}},
@@ -134,14 +134,12 @@ def wait_on_entry_calc(self, key):
134134
self.check_calc_timeout(time_spent)
135135

136136
def clear_cache(self):
    """Remove every cached entry belonging to the wrapped function."""
    query = {"func": self._func_str}
    self.mongo_collection.delete_many(filter=query)
140138

141139
def clear_being_calculated(self):
142140
self.mongo_collection.update_many(
143141
filter={
144-
"func": _MongoCore._get_func_str(self.func),
142+
"func": self._func_str,
145143
"being_calculated": True,
146144
},
147145
update={"$set": {"being_calculated": False}},

cachier/cores/pickle.py

Lines changed: 26 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -86,30 +86,33 @@ def __init__(
8686
self.separate_files = _update_with_defaults(
8787
separate_files, "separate_files"
8888
)
89-
self.cache_fname = None
90-
self.cache_fpath = None
89+
self._cache_fname = None
90+
self._cache_fpath = None
9191

92-
def _cache_fname(self):
93-
if self.cache_fname is None:
92+
@property
def cache_fname(self) -> str:
    """Lazily computed file name of this function's pickle cache.

    Derived from the wrapped function's module and qualified name; angle
    brackets (as in ``<locals>`` / ``<lambda>``) are replaced with
    underscores so the result is a valid file name. Memoized in
    ``self._cache_fname`` after the first access.
    """
    if self._cache_fname is None:
        raw = f".{self.func.__module__}.{self.func.__qualname__}"
        table = str.maketrans({"<": "_", ">": "_"})
        self._cache_fname = raw.translate(table)
    return self._cache_fname
9798

98-
def _cache_fpath(self):
99-
if self.cache_fpath is None:
99+
@property
def cache_fpath(self) -> str:
    """Lazily computed absolute path of this function's cache file.

    Creates the cache directory on first access if it is missing; the
    result is memoized in ``self._cache_fpath``.
    """
    if self._cache_fpath is None:
        os.makedirs(self.cache_dir, exist_ok=True)
        real_dir = os.path.realpath(self.cache_dir)
        joined = os.path.join(real_dir, self.cache_fname)
        self._cache_fpath = os.path.abspath(joined)
    return self._cache_fpath
107109

108110
def _reload_cache(self):
109111
with self.lock:
110-
fpath = self._cache_fpath()
111112
try:
112-
with portalocker.Lock(fpath, mode="rb") as cache_file:
113+
with portalocker.Lock(
114+
self.cache_fpath, mode="rb"
115+
) as cache_file:
113116
try:
114117
self.cache = pickle.load(cache_file) # noqa: S301
115118
except EOFError:
@@ -124,7 +127,7 @@ def _get_cache(self):
124127
return self.cache
125128

126129
def _get_cache_by_key(self, key=None, hash=None):
127-
fpath = self._cache_fpath()
130+
fpath = self.cache_fpath
128131
fpath += f"_{key}" if hash is None else f"_{hash}"
129132
try:
130133
with portalocker.Lock(fpath, mode="rb") as cache_file:
@@ -133,15 +136,13 @@ def _get_cache_by_key(self, key=None, hash=None):
133136
return None
134137

135138
def _clear_all_cache_files(self):
    """Delete every separate-file cache entry of this function.

    Separate-file entries live next to the main cache file and are named
    ``<cache_fname>_<suffix>``.
    """
    directory, base = os.path.split(self.cache_fpath)
    prefix = base + "_"
    for entry in os.listdir(directory):
        if entry.startswith(prefix):
            os.remove(os.path.join(directory, entry))
141143

142144
def _clear_being_calculated_all_cache_files(self):
143-
fpath = self._cache_fpath()
144-
path, name = os.path.split(fpath)
145+
path, name = os.path.split(self.cache_fpath)
145146
for subpath in os.listdir(path):
146147
if subpath.startswith(name):
147148
entry = self._get_cache_by_key(hash=subpath.split("_")[-1])
@@ -150,13 +151,13 @@ def _clear_being_calculated_all_cache_files(self):
150151
self._save_cache(entry, hash=subpath.split("_")[-1])
151152

152153
def _save_cache(self, cache, key=None, hash=None):
154+
fpath = self.cache_fpath
155+
if key is not None:
156+
fpath += f"_{key}"
157+
elif hash is not None:
158+
fpath += f"_{hash}"
153159
with self.lock:
154160
self.cache = cache
155-
fpath = self._cache_fpath()
156-
if key is not None:
157-
fpath += f"_{key}"
158-
elif hash is not None:
159-
fpath += f"_{hash}"
160161
with portalocker.Lock(fpath, mode="wb") as cache_file:
161162
pickle.dump(cache, cache_file, protocol=4)
162163
if key is None:
@@ -231,12 +232,12 @@ def mark_entry_not_calculated(self, key):
231232
def wait_on_entry_calc(self, key):
232233
if self.separate_files:
233234
entry = self._get_cache_by_key(key)
234-
filename = f"{self._cache_fname()}_{key}"
235+
filename = f"{self.cache_fname}_{key}"
235236
else:
236237
with self.lock:
237238
self._reload_cache()
238239
entry = self._get_cache()[key]
239-
filename = self._cache_fname()
240+
filename = self.cache_fname
240241
if not entry["being_calculated"]:
241242
return entry["value"]
242243
event_handler = _PickleCore.CacheChangeHandler(

0 commit comments

Comments (0)