# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
|
2014-11-29 22:39:44 +02:00
|
|
|
import json
|
2016-09-14 19:06:22 +03:00
|
|
|
import os
|
2016-08-27 23:15:32 +03:00
|
|
|
import uuid
|
2015-11-25 19:54:06 +02:00
|
|
|
from copy import deepcopy
|
2016-09-14 19:06:22 +03:00
|
|
|
from os import environ, getenv, listdir, remove
|
|
|
|
from os.path import dirname, getmtime, isdir, isfile, join
|
2015-09-23 16:35:31 +03:00
|
|
|
from time import time
|
2014-11-29 22:39:44 +02:00
|
|
|
|
2016-10-10 20:59:47 +03:00
|
|
|
import requests
|
2016-10-10 23:06:09 +03:00
|
|
|
from lockfile import LockFailed, LockFile
|
2015-09-03 19:04:09 +03:00
|
|
|
|
2016-10-10 23:06:09 +03:00
|
|
|
from platformio import __version__, exception, util
|
2014-11-29 22:39:44 +02:00
|
|
|
from platformio.exception import InvalidSettingName, InvalidSettingValue
|
|
|
|
|
|
|
|
# User-tunable settings: each entry maps a setting name to a human-readable
# description (shown by `platformio settings`) and its default value.
# Defaults may be overridden via PLATFORMIO_SETTING_* environment variables
# or the persisted application state (see get_setting()).
DEFAULT_SETTINGS = {
    "check_platformio_interval": {
        "description": "Check for the new PlatformIO interval (days)",
        "value": 3,
    },
    "check_platforms_interval": {
        "description": "Check for the platform updates interval (days)",
        "value": 7,
    },
    "check_libraries_interval": {
        "description": "Check for the library updates interval (days)",
        "value": 7,
    },
    "auto_update_platforms": {
        "description": "Automatically update platforms (Yes/No)",
        "value": False,
    },
    "auto_update_libraries": {
        "description": "Automatically update libraries (Yes/No)",
        "value": False,
    },
    "force_verbose": {
        "description": "Force verbose output when processing environments",
        "value": False,
    },
    "enable_ssl": {
        "description": "Enable SSL for PlatformIO Services",
        "value": False,
    },
    "enable_cache": {
        "description": "Enable caching for API requests and Library Manager",
        "value": True,
    },
    "enable_telemetry": {
        "description": ("Telemetry service <http://docs.platformio.org/page/"
                        "userguide/cmd_settings.html?#enable-telemetry> "
                        "(Yes/No)"),
        "value": True,
    },
}

# Mutable per-process session variables (in-memory only, never persisted)
SESSION_VARS = {
    "command_ctx": None,
    "force_option": False,
    "caller_id": None,
}
|
2015-04-16 17:04:45 +01:00
|
|
|
|
|
|
|
|
2014-11-29 22:39:44 +02:00
|
|
|
class State(object):
    """Persistent application state stored as JSON (``appstate.json``).

    Used as a context manager::

        with State() as data:
            data["key"] = "value"

    On exit the file is rewritten only when the state actually changed.
    With ``lock=True`` the read/modify/write cycle is guarded by a
    lockfile against concurrent PlatformIO processes.
    """

    def __init__(self, path=None, lock=False):
        # path: custom state-file location; defaults to
        # <platformio home>/appstate.json
        self.path = path
        # lock: when True, acquire a lockfile in __enter__ / release in
        # __exit__
        self.lock = lock
        if not self.path:
            self.path = join(util.get_home_dir(), "appstate.json")
        self._state = {}
        self._prev_state = {}
        self._lockfile = None

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                self._state = util.load_json(self.path)
        except ValueError:
            # Corrupted or invalid JSON on disk: start from an empty state
            self._state = {}
        # Snapshot for change detection in __exit__()
        self._prev_state = deepcopy(self._state)
        return self._state

    def __exit__(self, type_, value, traceback):
        # Write back only when the caller modified the state
        if self._prev_state != self._state:
            with open(self.path, "w") as fp:
                if "dev" in __version__:
                    # Pretty-print in development builds for easier debugging
                    json.dump(self._state, fp, indent=4)
                else:
                    json.dump(self._state, fp)
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)

        # Break locks that look stale (lock file untouched for >10 seconds),
        # e.g. left behind by a crashed process
        if self._lockfile.is_locked() and \
                (time() - getmtime(self._lockfile.lock_file)) > 10:
            self._lockfile.break_lock()

        try:
            self._lockfile.acquire()
        except LockFailed:
            # LockFailed here means we cannot create the lock file at all
            # (permissions/ownership problem), not that another process
            # holds it
            raise exception.PlatformioException(
                "The directory `{0}` or its parent directory is not owned by "
                "the current user and PlatformIO can not store configuration "
                "data. \nPlease check the permissions and owner of that "
                "directory. Otherwise, please remove manually `{0}` "
                "directory and PlatformIO will create new from the current "
                "user.".format(dirname(self.path)))

    def _unlock_state_file(self):
        if self._lockfile:
            self._lockfile.release()
|
2014-11-29 22:39:44 +02:00
|
|
|
|
|
|
|
|
2016-12-05 18:51:25 +02:00
|
|
|
class ContentCache(object):
    """File-based content cache with per-item expiration.

    Payloads are stored under ``<cache_dir>/<last two key chars>/<key>``;
    a plain-text index file (``db.data``) holds one ``<expire>=<path>``
    line per item.  When the ``enable_cache`` setting is off the instance
    is inert (``cache_dir`` and ``_db_path`` stay ``None``).
    """

    def __init__(self, cache_dir=None):
        self.cache_dir = None
        self._db_path = None
        self._lockfile = None

        if not get_setting("enable_cache"):
            return

        self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache")
        self._db_path = join(self.cache_dir, "db.data")

    def __enter__(self):
        # Nothing to maintain when caching is disabled or the index file
        # has not been created yet
        if not self._db_path or not isfile(self._db_path):
            return self

        # Purge expired items on entry
        self.delete()
        return self

    def __exit__(self, type_, value, traceback):
        pass

    def _lock_dbindex(self):
        """Acquire the lock protecting the index file.

        Returns True on success, False when caching is disabled or the
        lock could not be acquired.
        """
        if not self.cache_dir:
            # Caching disabled (see __init__); nothing to lock
            return False
        # BUGFIX: the guard used to be `if not self.cache_dir`, which never
        # created a missing cache directory (and crashed on a None path)
        if not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        self._lockfile = LockFile(self.cache_dir)

        # Break locks that look stale (lock file untouched for >10 seconds)
        if self._lockfile.is_locked() and \
                (time() - getmtime(self._lockfile.lock_file)) > 10:
            self._lockfile.break_lock()

        try:
            self._lockfile.acquire()
        except LockFailed:
            return False

        return True

    def _unlock_dbindex(self):
        if self._lockfile:
            self._lockfile.release()
        return True

    def get_cache_path(self, key):
        # Shard cached files into two-character subdirectories so no single
        # directory grows too large
        assert len(key) > 3
        return join(self.cache_dir, key[-2:], key)

    @staticmethod
    def key_from_args(*args):
        """Build a stable md5 hex digest key from arbitrary arguments."""
        h = hashlib.md5()
        for data in args:
            # md5 requires bytes on Python 3; str() preserves the legacy
            # keying behavior for non-string arguments
            h.update(str(data).encode())
        return h.hexdigest()

    def get(self, key):
        """Return the cached payload for *key* or None when missing.

        Payloads that look like JSON documents are deserialized.
        """
        cache_path = self.get_cache_path(key)
        if not isfile(cache_path):
            return None
        with open(cache_path, "rb") as fp:
            data = fp.read()
        # data[0:1] (not data[0]) so the comparison also works for `bytes`
        # objects on Python 3, where indexing yields an int
        if data and data[0:1] in (b"{", b"["):
            return json.loads(data)
        return data

    def set(self, key, data, valid):
        """Store *data* under *key* for the period described by *valid*.

        *valid* is a number with a unit suffix: "s" seconds, "m" minutes,
        "h" hours, "d" days (e.g. "2d").  Returns False when the index
        could not be locked.
        """
        cache_path = self.get_cache_path(key)
        if isfile(cache_path):
            self.delete(key)
        if not data:
            return
        if not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        assert valid.endswith(tuple(tdmap.keys()))
        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))

        if not self._lock_dbindex():
            return False

        if not isdir(dirname(cache_path)):
            os.makedirs(dirname(cache_path))
        with open(cache_path, "wb") as fp:
            if isinstance(data, (dict, list)):
                # dumps+encode keeps the binary file handle valid on
                # Python 3 (json.dump writes `str`)
                fp.write(json.dumps(data).encode())
            else:
                fp.write(str(data).encode())
        with open(self._db_path, "a") as fp:
            fp.write("%s=%s\n" % (str(expire_time), cache_path))

        return self._unlock_dbindex()

    def delete(self, keys=None):
        """Delete the given key(s); with keys=None, delete expired items."""
        if not keys:
            keys = []
        if not isinstance(keys, list):
            keys = [keys]
        paths_for_delete = [self.get_cache_path(k) for k in keys]
        found = False
        newlines = []
        # Guard against a manually removed index file (the old code would
        # crash on open() here)
        if not self._db_path or not isfile(self._db_path):
            return True
        with open(self._db_path) as fp:
            for line in fp.readlines():
                if "=" not in line:
                    continue
                line = line.strip()
                # maxsplit=1 keeps paths intact even if they ever contain "="
                expire, path = line.split("=", 1)
                if time() < int(expire) and isfile(path) and \
                        path not in paths_for_delete:
                    newlines.append(line)
                    continue
                found = True
                if isfile(path):
                    try:
                        remove(path)
                        # Drop the shard directory once it is empty
                        if not listdir(dirname(path)):
                            util.rmtree_(dirname(path))
                    except OSError:
                        pass

        # Rewrite the index only when something was actually removed
        if found and self._lock_dbindex():
            with open(self._db_path, "w") as fp:
                fp.write("\n".join(newlines) + "\n")
            self._unlock_dbindex()

        return True

    def clean(self):
        """Remove the entire cache directory."""
        if not self.cache_dir or not isdir(self.cache_dir):
            return
        util.rmtree_(self.cache_dir)
|
2016-09-14 19:06:22 +03:00
|
|
|
|
|
|
|
|
2017-02-01 14:52:32 +02:00
|
|
|
def clean_cache():
    """Wipe the whole content-cache directory."""
    cache = ContentCache()
    with cache:
        cache.clean()
|
|
|
|
|
|
|
|
|
2015-03-05 01:36:31 +02:00
|
|
|
def sanitize_setting(name, value):
    """Validate *name* against DEFAULT_SETTINGS and coerce *value*.

    Booleans accept "true"/"yes"/"y"/"1" (case-insensitive); integers are
    converted with int().  Raises InvalidSettingName for unknown names and
    InvalidSettingValue when coercion/validation fails.
    """
    try:
        spec = DEFAULT_SETTINGS[name]
    except KeyError:
        raise InvalidSettingName(name)

    try:
        if "validator" in spec:
            value = spec['validator']()
        elif isinstance(spec['value'], bool):
            if not isinstance(value, bool):
                value = str(value).lower() in ("true", "yes", "y", "1")
        elif isinstance(spec['value'], int):
            value = int(value)
    except Exception:
        # Any failure during coercion/validation maps to one exception type
        raise InvalidSettingValue(value, name)
    return value
|
|
|
|
|
|
|
|
|
2014-11-29 22:39:44 +02:00
|
|
|
def get_state_item(name, default=None):
    """Read one item from the persistent application state."""
    with State() as state_data:
        return state_data.get(name, default)
|
|
|
|
|
|
|
|
|
|
|
|
def set_state_item(name, value):
    """Store *value* under *name* in the persistent application state."""
    with State(lock=True) as state_data:
        state_data[name] = value
|
|
|
|
|
|
|
|
|
2017-05-26 00:45:56 +03:00
|
|
|
def delete_state_item(name):
    """Drop *name* from the persistent state (no-op when absent)."""
    with State(lock=True) as state_data:
        state_data.pop(name, None)
|
|
|
|
|
|
|
|
|
2014-11-29 22:39:44 +02:00
|
|
|
def get_setting(name):
    """Resolve the value of setting *name*.

    Lookup order: ``PLATFORMIO_SETTING_<NAME>`` environment variable
    (sanitized), then the persisted application state, then the default
    from DEFAULT_SETTINGS.
    """
    env_name = "PLATFORMIO_SETTING_%s" % name.upper()
    if env_name in environ:
        return sanitize_setting(name, environ[env_name])

    with State() as state_data:
        stored = state_data.get("settings", {})
        if name in stored:
            return stored[name]

    return DEFAULT_SETTINGS[name]['value']
|
|
|
|
|
|
|
|
|
|
|
|
def set_setting(name, value):
    """Sanitize *value* and persist it under settings/*name*."""
    with State(lock=True) as state_data:
        settings = state_data.setdefault("settings", {})
        settings[name] = sanitize_setting(name, value)
|
2014-11-29 22:39:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
def reset_settings():
    """Remove all persisted user settings from the application state."""
    with State(lock=True) as state_data:
        state_data.pop("settings", None)
|
2015-04-16 17:04:45 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_session_var(name, default=None):
    """Return an in-memory session variable, or *default* when unset."""
    try:
        return SESSION_VARS[name]
    except KeyError:
        return default
|
|
|
|
|
|
|
|
|
|
|
|
def set_session_var(name, value):
    """Assign to an in-memory session variable; *name* must already exist."""
    assert name in SESSION_VARS
    SESSION_VARS[name] = value
|
2016-01-24 16:45:04 +02:00
|
|
|
|
|
|
|
|
|
|
|
def is_disabled_progressbar():
    """Return True when progress bars should be suppressed.

    That is the case with the --force option, on CI systems, or when the
    PLATFORMIO_DISABLE_PROGRESSBAR environment variable is "true".
    """
    return bool(
        get_session_var("force_option") or util.is_ci()
        or getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true")
|
2016-08-27 23:15:32 +03:00
|
|
|
|
|
|
|
|
|
|
|
def get_cid():
    """Return a stable anonymous client ID (UUID string).

    The ID is created on first use and persisted in the application state.
    In Cloud9 / Eclipse Che environments the user ID reported by the host
    is used as the seed; otherwise the MAC address (uuid.getnode()) is.
    """
    cid = get_state_item("cid")
    if cid:
        return cid

    _uid = None
    if getenv("C9_UID"):
        _uid = getenv("C9_UID")
    elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
        try:
            # BUGFIX: a timeout is required, otherwise an unreachable Che
            # API endpoint would hang PlatformIO indefinitely
            _uid = requests.get(
                "{api}/user?token={token}".format(
                    api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
                    token=getenv("USER_TOKEN")),
                timeout=10).json().get("id")
        except:  # pylint: disable=bare-except
            # Best-effort lookup: fall back to the MAC-address seed below
            pass

    # md5 requires bytes on Python 3; .encode() is safe here since the
    # seed is an ASCII user id or a decimal MAC-address integer
    cid = str(
        uuid.UUID(bytes=hashlib.md5(
            str(_uid if _uid else uuid.getnode()).encode()).digest()))
    set_state_item("cid", cid)
    return cid
|