# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import codecs
import hashlib
import os
import uuid
from os import environ, getenv, listdir, remove
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from time import time

import requests

from platformio import exception, lockfile, util
from platformio.compat import (WINDOWS, dump_json_to_unicode,
                               hashlib_encode_data)
from platformio.proc import is_ci
from platformio.project.helpers import (get_project_cache_dir,
                                        get_project_core_dir)


def get_default_projects_dir():
    docs_dir = join(expanduser("~"), "Documents")
    try:
        assert WINDOWS
        import ctypes.wintypes
        # Resolve the real "My Documents" folder (CSIDL 5 = CSIDL_PERSONAL)
        buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
        ctypes.windll.shell32.SHGetFolderPathW(None, 5, None, 0, buf)
        docs_dir = buf.value
    except:  # pylint: disable=bare-except
        pass
    return join(docs_dir, "PlatformIO", "Projects")


def projects_dir_validate(projects_dir):
    assert isdir(projects_dir)
    return abspath(projects_dir)


DEFAULT_SETTINGS = {
    "auto_update_libraries": {
        "description": "Automatically update libraries (Yes/No)",
        "value": False
    },
    "auto_update_platforms": {
        "description": "Automatically update platforms (Yes/No)",
        "value": False
    },
    "check_libraries_interval": {
        "description": "Check for the library updates interval (days)",
        "value": 7
    },
    "check_platformio_interval": {
        "description": "Check for the new PlatformIO interval (days)",
        "value": 3
    },
    "check_platforms_interval": {
        "description": "Check for the platform updates interval (days)",
        "value": 7
    },
    "enable_cache": {
        "description": "Enable caching for API requests and Library Manager",
        "value": True
    },
    "strict_ssl": {
        "description": "Strict SSL for PlatformIO Services",
        "value": False
    },
    "enable_telemetry": {
        "description":
        ("Telemetry service <https://docs.platformio.org/page/"
         "userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
        "value":
        True
    },
    "force_verbose": {
        "description": "Force verbose output when processing environments",
        "value": False
    },
    "projects_dir": {
        "description": "Default location for PlatformIO projects (PIO Home)",
        "value": get_default_projects_dir(),
        "validator": projects_dir_validate
    },
}

SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None}


class State(object):

    def __init__(self, path=None, lock=False):
        self.path = path
        self.lock = lock
        if not self.path:
            self.path = join(get_project_core_dir(), "appstate.json")
        self._storage = {}
        self._lockfile = None
        self.modified = False

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                self._storage = util.load_json(self.path)
            assert isinstance(self._storage, dict)
        except (AssertionError, ValueError, UnicodeDecodeError,
                exception.InvalidJSONFile):
            self._storage = {}
        return self

    def __exit__(self, type_, value, traceback):
        if self.modified:
            try:
                with open(self.path, "w") as fp:
                    fp.write(dump_json_to_unicode(self._storage))
            except IOError:
                raise exception.HomeDirPermissionsError(
                    get_project_core_dir())
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = lockfile.LockFile(self.path)
        try:
            self._lockfile.acquire()
        except IOError:
            raise exception.HomeDirPermissionsError(dirname(self.path))

    def _unlock_state_file(self):
        if hasattr(self, "_lockfile") and self._lockfile:
            self._lockfile.release()

    def __del__(self):
        self._unlock_state_file()

    # Dictionary Proxy

    def as_dict(self):
        return self._storage

    def get(self, key, default=True):
        return self._storage.get(key, default)

    def update(self, *args, **kwargs):
        self.modified = True
        return self._storage.update(*args, **kwargs)

    def clear(self):
        return self._storage.clear()

    def __getitem__(self, key):
        return self._storage[key]

    def __setitem__(self, key, value):
        self.modified = True
        self._storage[key] = value

    def __delitem__(self, key):
        self.modified = True
        del self._storage[key]

    def __contains__(self, item):
        return item in self._storage
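
# Usage sketch (illustrative): State is a dict-like context manager that
# persists itself to appstate.json on exit, but only when it was modified:
#
#     with State(lock=True) as state:
#         state["last_check"] = int(time())  # hypothetical key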


class ContentCache(object):

    def __init__(self, cache_dir=None):
        self.cache_dir = None
        self._db_path = None
        self._lockfile = None

        self.cache_dir = cache_dir or get_project_cache_dir()
        self._db_path = join(self.cache_dir, "db.data")

    def __enter__(self):
        # Purge expired entries before handing the cache out
        self.delete()
        return self

    def __exit__(self, type_, value, traceback):
        pass

    def _lock_dbindex(self):
        # Ensure the cache directory exists before placing a lock file in it
        if not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        self._lockfile = lockfile.LockFile(self.cache_dir)
        try:
            self._lockfile.acquire()
        except:  # pylint: disable=bare-except
            return False

        return True

    def _unlock_dbindex(self):
        if self._lockfile:
            self._lockfile.release()
        return True

    def get_cache_path(self, key):
        key = str(key)
        assert len(key) > 3
        # Shard entries into subfolders by the last two key characters
        return join(self.cache_dir, key[-2:], key)

    @staticmethod
    def key_from_args(*args):
        h = hashlib.md5()
        for arg in args:
            if arg:
                h.update(hashlib_encode_data(arg))
        return h.hexdigest()
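
    # Note: MD5 here is only a cache-key fingerprint, not a security boundary.
    # Illustrative call (hypothetical arguments): a request method plus URL
    # map to a stable hex key:
    #
    #     ContentCache.key_from_args("GET", "https://api.platformio.org/lib")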

    def get(self, key):
        cache_path = self.get_cache_path(key)
        if not isfile(cache_path):
            return None
        with codecs.open(cache_path, "rb", encoding="utf8") as fp:
            return fp.read()

    def set(self, key, data, valid):
        if not get_setting("enable_cache"):
            return False
        cache_path = self.get_cache_path(key)
        if isfile(cache_path):
            self.delete(key)
        if not data:
            return False
        if not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        assert valid.endswith(tuple(tdmap))
        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))

        if not self._lock_dbindex():
            return False

        if not isdir(dirname(cache_path)):
            os.makedirs(dirname(cache_path))
        try:
            with codecs.open(cache_path, "wb", encoding="utf8") as fp:
                fp.write(data)
            with open(self._db_path, "a") as fp:
                fp.write("%s=%s\n" % (str(expire_time), cache_path))
        except UnicodeError:
            if isfile(cache_path):
                try:
                    remove(cache_path)
                except OSError:
                    pass

        return self._unlock_dbindex()
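
    # The "valid" argument is a number plus a unit from tdmap ("s", "m", "h",
    # "d"), so "2d" keeps an entry for two days. Illustrative call with a
    # hypothetical key and payload:
    #
    #     cc.set(cc.key_from_args("pkg", "1.2.3"), '{"ok": true}', "2d")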

    def delete(self, keys=None):
        """With no keys given, delete only the expired items."""
        if not isfile(self._db_path):
            return None
        if not keys:
            keys = []
        if not isinstance(keys, list):
            keys = [keys]
        paths_for_delete = [self.get_cache_path(k) for k in keys]
        found = False
        newlines = []
        with open(self._db_path) as fp:
            for line in fp.readlines():
                line = line.strip()
                if "=" not in line:
                    continue
                expire, path = line.split("=")
                try:
                    if time() < int(expire) and isfile(path) and \
                            path not in paths_for_delete:
                        newlines.append(line)
                        continue
                except ValueError:
                    pass
                found = True
                if isfile(path):
                    try:
                        remove(path)
                        if not listdir(dirname(path)):
                            util.rmtree_(dirname(path))
                    except OSError:
                        pass

        if found and self._lock_dbindex():
            with open(self._db_path, "w") as fp:
                fp.write("\n".join(newlines) + "\n")
            self._unlock_dbindex()

        return True

    def clean(self):
        if not self.cache_dir or not isdir(self.cache_dir):
            return
        util.rmtree_(self.cache_dir)


def clean_cache():
    with ContentCache() as cc:
        cc.clean()
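
# Read-through usage sketch (hypothetical key parts): entering the context
# first prunes expired entries via delete(), then get() returns the cached
# text or None:
#
#     with ContentCache() as cc:
#         key = cc.key_from_args("api", "https://api.platformio.org/packages")
#         data = cc.get(key)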


def sanitize_setting(name, value):
    if name not in DEFAULT_SETTINGS:
        raise exception.InvalidSettingName(name)

    defdata = DEFAULT_SETTINGS[name]
    try:
        if "validator" in defdata:
            value = defdata['validator'](value)
        elif isinstance(defdata['value'], bool):
            if not isinstance(value, bool):
                value = str(value).lower() in ("true", "yes", "y", "1")
        elif isinstance(defdata['value'], int):
            value = int(value)
    except Exception:
        raise exception.InvalidSettingValue(value, name)
    return value
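
# For example, sanitize_setting("enable_cache", "yes") returns True because
# the default value is a bool, while an unknown setting name raises
# exception.InvalidSettingName.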


def get_state_item(name, default=None):
    with State() as state:
        return state.get(name, default)


def set_state_item(name, value):
    with State(lock=True) as state:
        state[name] = value
        state.modified = True


def delete_state_item(name):
    with State(lock=True) as state:
        if name in state:
            del state[name]


def get_setting(name):
    _env_name = "PLATFORMIO_SETTING_%s" % name.upper()
    if _env_name in environ:
        return sanitize_setting(name, getenv(_env_name))

    with State() as state:
        if "settings" in state and name in state['settings']:
            return state['settings'][name]

    return DEFAULT_SETTINGS[name]['value']
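
# Environment variables win over persisted state: exporting
# PLATFORMIO_SETTING_ENABLE_CACHE=false makes get_setting("enable_cache")
# return False no matter what appstate.json contains.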


def set_setting(name, value):
    with State(lock=True) as state:
        if "settings" not in state:
            state['settings'] = {}
        state['settings'][name] = sanitize_setting(name, value)
        state.modified = True


def reset_settings():
    with State(lock=True) as state:
        if "settings" in state:
            del state['settings']


def get_session_var(name, default=None):
    return SESSION_VARS.get(name, default)


def set_session_var(name, value):
    assert name in SESSION_VARS
    SESSION_VARS[name] = value


def is_disabled_progressbar():
    return any([
        get_session_var("force_option"),
        is_ci(),
        getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
    ])
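
# Progress bars are suppressed by any of three opt-outs: the session-level
# "force_option" flag, a CI environment detected by is_ci(), or the
# PLATFORMIO_DISABLE_PROGRESSBAR=true environment variable.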


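# get_cid() derives a stable, anonymous client ID for telemetry: a cached
# value from the app state if present, otherwise a Cloud9 (C9_UID) or Eclipse
# Che user ID, falling back to the MAC-derived uuid.getnode(); the raw ID is
# hashed into a UUID before it is stored or reported.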
def get_cid():
    cid = get_state_item("cid")
    if cid:
        return cid
    uid = None
    if getenv("C9_UID"):
        uid = getenv("C9_UID")
    elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
        try:
            uid = requests.get("{api}/user?token={token}".format(
                api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
                token=getenv("USER_TOKEN"))).json().get("id")
        except:  # pylint: disable=bare-except
            pass
    if not uid:
        uid = uuid.getnode()
    cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
    cid = str(cid)
    if WINDOWS or os.getuid() > 0:  # yapf: disable pylint: disable=no-member
        set_state_item("cid", cid)
    return cid