Improve content caching system

This commit is contained in:
Ivan Kravets
2016-12-07 00:38:59 +02:00
parent 22a272061f
commit ac01a37f5f
2 changed files with 42 additions and 11 deletions

View File

@@ -14,7 +14,7 @@
import sys import sys
VERSION = (3, 2, "0b6") VERSION = (3, 2, "0b7")
__version__ = ".".join([str(s) for s in VERSION]) __version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio" __title__ = "platformio"

View File

@@ -130,20 +130,23 @@ class ContentCache(object):
def __init__(self, cache_dir=None): def __init__(self, cache_dir=None):
self.cache_dir = None self.cache_dir = None
self.db_path = None self._db_path = None
self._lockfile = None
if not get_setting("enable_cache"): if not get_setting("enable_cache"):
return return
self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache") self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache")
if not self.cache_dir: if not self.cache_dir:
os.makedirs(self.cache_dir) os.makedirs(self.cache_dir)
self.db_path = join(self.cache_dir, "db.data") self._db_path = join(self.cache_dir, "db.data")
def __enter__(self): def __enter__(self):
if not self.db_path or not isfile(self.db_path): if not self._db_path or not isfile(self._db_path):
return self return self
newlines = []
found = False found = False
with open(self.db_path) as fp: newlines = []
with open(self._db_path) as fp:
for line in fp.readlines(): for line in fp.readlines():
if "=" not in line: if "=" not in line:
continue continue
@@ -157,14 +160,34 @@ class ContentCache(object):
remove(path) remove(path)
if not len(listdir(dirname(path))): if not len(listdir(dirname(path))):
util.rmtree_(dirname(path)) util.rmtree_(dirname(path))
if found:
with open(self.db_path, "w") as fp: if found and self._lock_dbindex():
with open(self._db_path, "w") as fp:
fp.write("\n".join(newlines) + "\n") fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()
return self return self
def __exit__(self, type_, value, traceback): def __exit__(self, type_, value, traceback):
pass pass
def _lock_dbindex(self):
self._lockfile = LockFile(self.cache_dir)
if self._lockfile.is_locked() and \
(time() - getmtime(self._lockfile.lock_file)) > 10:
self._lockfile.break_lock()
try:
self._lockfile.acquire()
except LockFailed:
return False
return True
def _unlock_dbindex(self):
if self._lockfile:
self._lockfile.release()
def get_cache_path(self, key): def get_cache_path(self, key):
assert len(key) > 3 assert len(key) > 3
return join(self.cache_dir, key[-2:], key) return join(self.cache_dir, key[-2:], key)
@@ -191,9 +214,19 @@ class ContentCache(object):
def set(self, key, data, valid): def set(self, key, data, valid):
if not self.cache_dir or not data: if not self.cache_dir or not data:
return return
if not isdir(self.cache_dir):
os.makedirs(self.cache_dir)
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400} tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert valid.endswith(tuple(tdmap.keys())) assert valid.endswith(tuple(tdmap.keys()))
cache_path = self.get_cache_path(key) cache_path = self.get_cache_path(key)
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
if not self._lock_dbindex():
return False
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
self._unlock_dbindex()
if not isdir(dirname(cache_path)): if not isdir(dirname(cache_path)):
os.makedirs(dirname(cache_path)) os.makedirs(dirname(cache_path))
with open(cache_path, "wb") as fp: with open(cache_path, "wb") as fp:
@@ -201,9 +234,7 @@ class ContentCache(object):
json.dump(data, fp) json.dump(data, fp)
else: else:
fp.write(str(data)) fp.write(str(data))
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
with open(self.db_path, "w+") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
return True return True
def clean(self): def clean(self):