platformio-core/platformio/app.py

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import hashlib
import os
import uuid
from copy import deepcopy
from os import environ, getenv, listdir, remove
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from time import time
import requests
from platformio import exception, lockfile, util
from platformio.compat import (WINDOWS, dump_json_to_unicode,
hashlib_encode_data)
from platformio.proc import is_ci
from platformio.project.helpers import (get_project_cache_dir,
get_project_core_dir)
def projects_dir_validate(projects_dir):
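    """Check that the given projects directory exists and return its absolute path."""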
assert isdir(projects_dir)
return abspath(projects_dir)
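
# User-configurable settings and their defaults; get_setting() below also honors
# PLATFORMIO_SETTING_<NAME> environment variable overrides.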
DEFAULT_SETTINGS = {
"auto_update_libraries": {
"description": "Automatically update libraries (Yes/No)",
"value": False
},
"auto_update_platforms": {
"description": "Automatically update platforms (Yes/No)",
"value": False
},
"check_libraries_interval": {
"description": "Check for the library updates interval (days)",
"value": 7
},
"check_platformio_interval": {
"description": "Check for the new PlatformIO interval (days)",
"value": 3
},
"check_platforms_interval": {
"description": "Check for the platform updates interval (days)",
"value": 7
},
"enable_cache": {
"description": "Enable caching for API requests and Library Manager",
"value": True
},
"enable_ssl": {
"description": "Enable SSL for PlatformIO Services",
"value": False
},
"enable_telemetry": {
"description":
("Telemetry service <https://docs.platformio.org/page/"
"userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
"value":
True
},
"force_verbose": {
"description": "Force verbose output when processing environments",
"value": False
},
"projects_dir": {
"description": "Default location for PlatformIO projects (PIO Home)",
"value": join(expanduser("~"), "Documents", "PlatformIO", "Projects"),
"validator": projects_dir_validate
},
}
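
# Per-process session variables shared between commands; unlike settings, they are never persisted.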
SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None}
class State(object):
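    """Context manager for the persistent application state stored in appstate.json.

    With lock=True an exclusive lock is taken on the state file so that
    concurrent PlatformIO processes do not overwrite each other's changes.
    """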
def __init__(self, path=None, lock=False):
self.path = path
self.lock = lock
if not self.path:
self.path = join(get_project_core_dir(), "appstate.json")
self._state = {}
self._prev_state = {}
self._lockfile = None
def __enter__(self):
try:
self._lock_state_file()
if isfile(self.path):
self._state = util.load_json(self.path)
assert isinstance(self._state, dict)
except (AssertionError, UnicodeDecodeError,
exception.PlatformioException):
self._state = {}
self._prev_state = deepcopy(self._state)
return self._state
def __exit__(self, type_, value, traceback):
new_state = dump_json_to_unicode(self._state)
if self._prev_state != new_state:
try:
with open(self.path, "w") as fp:
fp.write(new_state)
except IOError:
raise exception.HomeDirPermissionsError(get_project_core_dir())
self._unlock_state_file()
def _lock_state_file(self):
if not self.lock:
return
self._lockfile = lockfile.LockFile(self.path)
try:
self._lockfile.acquire()
except IOError:
raise exception.HomeDirPermissionsError(dirname(self.path))
def _unlock_state_file(self):
if hasattr(self, "_lockfile") and self._lockfile:
self._lockfile.release()
def __del__(self):
self._unlock_state_file()
class ContentCache(object):
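    """File-based content cache with per-entry expiration, indexed by a simple "expire=path" file (db.data)."""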
def __init__(self, cache_dir=None):
self.cache_dir = None
self._db_path = None
self._lockfile = None
self.cache_dir = cache_dir or get_project_cache_dir()
self._db_path = join(self.cache_dir, "db.data")
def __enter__(self):
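        # Opening the cache purges expired entries first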
self.delete()
return self
def __exit__(self, type_, value, traceback):
pass
def _lock_dbindex(self):
        # Make sure the cache directory exists before locking it
        if not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
self._lockfile = lockfile.LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except: # pylint: disable=bare-except
return False
return True
def _unlock_dbindex(self):
if self._lockfile:
self._lockfile.release()
return True
def get_cache_path(self, key):
key = str(key)
assert len(key) > 3
return join(self.cache_dir, key[-2:], key)
@staticmethod
def key_from_args(*args):
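        """Build a cache key by MD5-hashing all truthy arguments."""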
h = hashlib.md5()
for arg in args:
if arg:
h.update(hashlib_encode_data(arg))
return h.hexdigest()
def get(self, key):
cache_path = self.get_cache_path(key)
if not isfile(cache_path):
return None
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
return fp.read()
def set(self, key, data, valid):
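        """Cache data under key for a lifetime such as "10s", "5m", "2h" or "1d"."""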
if not get_setting("enable_cache"):
return False
cache_path = self.get_cache_path(key)
if isfile(cache_path):
self.delete(key)
if not data:
return False
if not isdir(self.cache_dir):
os.makedirs(self.cache_dir)
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert valid.endswith(tuple(tdmap))
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
if not self._lock_dbindex():
return False
if not isdir(dirname(cache_path)):
os.makedirs(dirname(cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
except UnicodeError:
if isfile(cache_path):
try:
remove(cache_path)
except OSError:
pass
return self._unlock_dbindex()
    def delete(self, keys=None):
        """Delete the given keys from the cache; when keys is None, remove only expired entries."""
if not isfile(self._db_path):
return None
if not keys:
keys = []
if not isinstance(keys, list):
keys = [keys]
paths_for_delete = [self.get_cache_path(k) for k in keys]
found = False
newlines = []
with open(self._db_path) as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
expire, path = line.split("=")
try:
if time() < int(expire) and isfile(path) and \
path not in paths_for_delete:
newlines.append(line)
continue
except ValueError:
pass
found = True
if isfile(path):
try:
remove(path)
if not listdir(dirname(path)):
util.rmtree_(dirname(path))
except OSError:
pass
if found and self._lock_dbindex():
with open(self._db_path, "w") as fp:
fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()
return True
def clean(self):
if not self.cache_dir or not isdir(self.cache_dir):
return
util.rmtree_(self.cache_dir)
def clean_cache():
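    """Remove the whole content cache directory."""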
with ContentCache() as cc:
cc.clean()
def sanitize_setting(name, value):
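    """Validate a setting name and coerce the raw value to the type of its default."""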
if name not in DEFAULT_SETTINGS:
raise exception.InvalidSettingName(name)
defdata = DEFAULT_SETTINGS[name]
try:
if "validator" in defdata:
value = defdata['validator'](value)
elif isinstance(defdata['value'], bool):
if not isinstance(value, bool):
value = str(value).lower() in ("true", "yes", "y", "1")
elif isinstance(defdata['value'], int):
value = int(value)
except Exception:
raise exception.InvalidSettingValue(value, name)
return value
def get_state_item(name, default=None):
with State() as data:
return data.get(name, default)
def set_state_item(name, value):
with State(lock=True) as data:
data[name] = value
def delete_state_item(name):
with State(lock=True) as data:
if name in data:
del data[name]
def get_setting(name):
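    """Return a setting value: environment override first, then saved state, then the default."""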
_env_name = "PLATFORMIO_SETTING_%s" % name.upper()
if _env_name in environ:
return sanitize_setting(name, getenv(_env_name))
with State() as data:
if "settings" in data and name in data['settings']:
return data['settings'][name]
return DEFAULT_SETTINGS[name]['value']
def set_setting(name, value):
with State(lock=True) as data:
if "settings" not in data:
data['settings'] = {}
data['settings'][name] = sanitize_setting(name, value)
def reset_settings():
with State(lock=True) as data:
if "settings" in data:
del data['settings']
def get_session_var(name, default=None):
return SESSION_VARS.get(name, default)
def set_session_var(name, value):
assert name in SESSION_VARS
SESSION_VARS[name] = value
def is_disabled_progressbar():
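    """Return True when progress bars should be suppressed (forced output, CI, or explicit opt-out)."""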
return any([
get_session_var("force_option"),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
])
def get_cid():
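    """Return the client ID (CID), deriving it from the cloud IDE user or machine ID on first use."""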
cid = get_state_item("cid")
if cid:
return cid
uid = None
if getenv("C9_UID"):
uid = getenv("C9_UID")
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
try:
uid = requests.get("{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"))).json().get("id")
except: # pylint: disable=bare-except
pass
if not uid:
uid = uuid.getnode()
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
cid = str(cid)
    # Persist the ID unless running as root on a POSIX system
    if WINDOWS or os.getuid() > 0:  # pylint: disable=no-member
        set_state_item("cid", cid)
return cid