# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import functools
import json
import os
import platform
import re
import socket
import stat
import subprocess
import sys
from ConfigParser import ConfigParser
from functools import wraps
from glob import glob
from os.path import (abspath, basename, dirname, expanduser, isdir, isfile,
                     join, normpath, splitdrive)
from shutil import rmtree
from threading import Thread
from time import sleep, time

import click
import requests

from platformio import __apiurl__, __version__, exception

# pylint: disable=too-many-ancestors


class ProjectConfig(ConfigParser):

    VARTPL_RE = re.compile(r"\$\{([^\.\}]+)\.([^\}]+)\}")

    def items(self, section, **_):  # pylint: disable=arguments-differ
        items = []
        for option in ConfigParser.options(self, section):
            items.append((option, self.get(section, option)))
        return items

    def get(self, section, option, **kwargs):
        value = ConfigParser.get(self, section, option, **kwargs)
        if "${" not in value or "}" not in value:
            return value
        return self.VARTPL_RE.sub(self._re_sub_handler, value)

    def _re_sub_handler(self, match):
        section, option = match.group(1), match.group(2)
        if section == "env" and not self.has_section(section):
            return os.getenv(option)
        return self.get(section, option)
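
# Illustrative note (not part of the upstream sources): ProjectConfig expands
# "${section.option}" templates when a value is read; a hypothetical
# platformio.ini such as
#
#   [common]
#   build_flags = -DVERSION=1
#
#   [env:uno]
#   build_flags = ${common.build_flags} -DDEBUG
#
# resolves "build_flags" for [env:uno] to "-DVERSION=1 -DDEBUG", while
# "${env.HOME}" falls back to the HOME environment variable whenever the
# file has no literal [env] section (see _re_sub_handler above).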


class AsyncPipe(Thread):

    def __init__(self, outcallback=None):
        Thread.__init__(self)
        self.outcallback = outcallback

        self._fd_read, self._fd_write = os.pipe()
        self._pipe_reader = os.fdopen(self._fd_read)
        self._buffer = []

        self.start()

    def get_buffer(self):
        return self._buffer

    def fileno(self):
        return self._fd_write

    def run(self):
        for line in iter(self._pipe_reader.readline, ""):
            line = line.strip()
            self._buffer.append(line)
            if self.outcallback:
                self.outcallback(line)
            else:
                print line
        self._pipe_reader.close()

    def close(self):
        os.close(self._fd_write)
        self.join()
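
# Usage sketch (illustrative assumption, not an upstream example): because
# AsyncPipe implements fileno(), it can be passed as stdout/stderr to
# exec_command() below, which buffers each line, optionally forwards it to
# the callback, and closes the pipe once the process finishes:
#
#   pipe = AsyncPipe(click.echo)
#   result = exec_command(["some-command"], stdout=pipe)
#   captured = pipe.get_buffer()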


class cd(object):

    def __init__(self, new_path):
        self.new_path = new_path
        self.prev_path = os.getcwd()

    def __enter__(self):
        os.chdir(self.new_path)

    def __exit__(self, etype, value, traceback):
        os.chdir(self.prev_path)
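
# Usage sketch (illustrative, names are hypothetical): temporarily switch the
# working directory and restore the previous one on exit, even if the body
# raises:
#
#   with cd(project_dir):
#       run_build_inside_project()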


class memoized(object):
    '''
    Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
    '''

    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        if not isinstance(args, collections.Hashable):
            # uncacheable. a list, for instance.
            # better to not cache than blow up.
            return self.func(*args)
        if args in self.cache:
            return self.cache[args]
        value = self.func(*args)
        self.cache[args] = value
        return value

    def __repr__(self):
        '''Return the function's docstring.'''
        return self.func.__doc__

    def __get__(self, obj, objtype):
        '''Support instance methods.'''
        fn = functools.partial(self.__call__, obj)
        fn.reset = self._reset
        return fn

    def _reset(self):
        self.cache = {}
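
# Illustrative note: results are cached per tuple of positional arguments, so
# a decorated zero-argument helper such as _api_request_session() or
# _internet_on() below is evaluated only once per process, e.g.
#
#   @memoized
#   def expensive_lookup(key):   # hypothetical example
#       return compute(key)      # compute() is hypothetical as well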


class throttle(object):

    def __init__(self, threshhold):
        self.threshhold = threshhold  # milliseconds
        self.last = 0

    def __call__(self, fn):

        @wraps(fn)
        def wrapper(*args, **kwargs):
            diff = int(round((time() - self.last) * 1000))
            if diff < self.threshhold:
                sleep((self.threshhold - diff) * 0.001)
            self.last = time()
            return fn(*args, **kwargs)

        return wrapper
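
# Illustrative note: the decorator enforces a minimum interval (in
# milliseconds) between consecutive calls by sleeping for the remainder;
# _get_api_result() below uses @throttle(500) to keep API requests at most
# one per half second.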


def singleton(cls):
    """ From PEP-318 http://www.python.org/dev/peps/pep-0318/#examples """
    _instances = {}

    def get_instance(*args, **kwargs):
        if cls not in _instances:
            _instances[cls] = cls(*args, **kwargs)
        return _instances[cls]

    return get_instance


def path_to_unicode(path):
    return path.decode(sys.getfilesystemencoding()).encode("utf-8")


def load_json(file_path):
    try:
        with open(file_path, "r") as f:
            return json.load(f)
    except ValueError:
        raise exception.PlatformioException(
            "Could not load broken JSON: %s" % file_path)


def get_systype():
    type_ = platform.system().lower()
    arch = platform.machine().lower()
    if type_ == "windows":
        arch = "amd64" if platform.architecture()[0] == "64bit" else "x86"
    return "%s_%s" % (type_, arch) if arch else type_


def pioversion_to_intstr():
    vermatch = re.match(r"^([\d\.]+)", __version__)
    assert vermatch
    return [int(i) for i in vermatch.group(1).split(".")[:3]]


def get_project_optional_dir(name, default=None):
    data = None
    var_name = "PLATFORMIO_%s" % name.upper()
    if var_name in os.environ:
        data = os.getenv(var_name)
    else:
        try:
            config = load_project_config()
            if (config.has_section("platformio")
                    and config.has_option("platformio", name)):
                data = config.get("platformio", name)
        except exception.NotPlatformIOProject:
            pass

    if not data:
        return default

    items = []
    for item in data.split(", "):
        if item.startswith("~"):
            item = expanduser(item)
        items.append(abspath(item))
    return ", ".join(items)


def get_home_dir():
    home_dir = get_project_optional_dir("home_dir",
                                        join(expanduser("~"), ".platformio"))
    win_home_dir = None
    if "windows" in get_systype():
        win_home_dir = splitdrive(home_dir)[0] + "\\.platformio"
        if isdir(win_home_dir):
            home_dir = win_home_dir

    if not isdir(home_dir):
        try:
            os.makedirs(home_dir)
        except:  # pylint: disable=bare-except
            if win_home_dir:
                os.makedirs(win_home_dir)
                home_dir = win_home_dir

    assert isdir(home_dir)
    return home_dir


def get_source_dir():
    curpath = abspath(__file__)
    if not isfile(curpath):
        for p in sys.path:
            if isfile(join(p, __file__)):
                curpath = join(p, __file__)
                break
    return dirname(curpath)


def get_project_dir():
    return os.getcwd()


def find_project_dir_above(path):
    if isfile(path):
        path = dirname(path)
    if is_platformio_project(path):
        return path
    if isdir(dirname(path)):
        return find_project_dir_above(dirname(path))
    return None


def is_platformio_project(project_dir=None):
    if not project_dir:
        project_dir = get_project_dir()
    return isfile(join(project_dir, "platformio.ini"))


def get_projectlib_dir():
    return get_project_optional_dir("lib_dir", join(get_project_dir(), "lib"))


def get_projectlibdeps_dir():
    return get_project_optional_dir("libdeps_dir",
                                    join(get_project_dir(), ".piolibdeps"))


def get_projectsrc_dir():
    return get_project_optional_dir("src_dir", join(get_project_dir(), "src"))


def get_projectinclude_dir():
    return get_project_optional_dir("include_dir",
                                    join(get_project_dir(), "include"))


def get_projecttest_dir():
    return get_project_optional_dir("test_dir", join(get_project_dir(),
                                                     "test"))


def get_projectboards_dir():
    return get_project_optional_dir("boards_dir",
                                    join(get_project_dir(), "boards"))


def get_projectbuild_dir(force=False):
    path = get_project_optional_dir("build_dir",
                                    join(get_project_dir(), ".pioenvs"))
    try:
        if not isdir(path):
            os.makedirs(path)
        dontmod_path = join(path, "do-not-modify-files-here.url")
        if not isfile(dontmod_path):
            with open(dontmod_path, "w") as fp:
                fp.write("""
[InternetShortcut]
URL=http://docs.platformio.org/page/projectconf/section_platformio.html#build-dir
""")
    except Exception as e:  # pylint: disable=broad-except
        if not force:
            raise Exception(e)
    return path


# compatibility with PIO Core+
get_projectpioenvs_dir = get_projectbuild_dir


def get_projectdata_dir():
    return get_project_optional_dir("data_dir", join(get_project_dir(),
                                                     "data"))


def load_project_config(path=None):
    if not path or isdir(path):
        path = join(path or get_project_dir(), "platformio.ini")
    if not isfile(path):
        raise exception.NotPlatformIOProject(
            dirname(path) if path.endswith("platformio.ini") else path)
    cp = ProjectConfig()
    cp.read(path)
    return cp


def parse_conf_multi_values(items):
    if not items:
        return []
    return [
        item.strip() for item in items.split("\n" if "\n" in items else ", ")
        if item.strip()
    ]
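
# Illustrative note: multi-value options split on newlines when present and
# on ", " otherwise, dropping empty entries, e.g.
#   parse_conf_multi_values("a, b, c")    -> ["a", "b", "c"]
#   parse_conf_multi_values("a\nb\n\nc")  -> ["a", "b", "c"]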


def change_filemtime(path, mtime):
    os.utime(path, (mtime, mtime))


def is_ci():
    return os.getenv("CI", "").lower() == "true"


def is_container():
    if not isfile("/proc/1/cgroup"):
        return False
    with open("/proc/1/cgroup") as fp:
        for line in fp:
            line = line.strip()
            if ":" in line and not line.endswith(":/"):
                return True
    return False


def exec_command(*args, **kwargs):
    result = {"out": None, "err": None, "returncode": None}

    default = dict(stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    default.update(kwargs)
    kwargs = default

    p = subprocess.Popen(*args, **kwargs)
    try:
        result['out'], result['err'] = p.communicate()
        result['returncode'] = p.returncode
    except KeyboardInterrupt:
        raise exception.AbortedByUser()
    finally:
        for s in ("stdout", "stderr"):
            if isinstance(kwargs[s], AsyncPipe):
                kwargs[s].close()

    for s in ("stdout", "stderr"):
        if isinstance(kwargs[s], AsyncPipe):
            result[s[3:]] = "\n".join(kwargs[s].get_buffer())

    for k, v in result.iteritems():
        if v and isinstance(v, basestring):
            result[k] = v.strip()

    return result
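
# Usage sketch (illustrative): run a command and inspect the captured
# streams; "out"/"err" hold stripped text and "returncode" the exit status:
#
#   result = exec_command(["git", "--version"])
#   if result['returncode'] == 0:
#       print result['out']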


def copy_pythonpath_to_osenv():
    _PYTHONPATH = []
    if "PYTHONPATH" in os.environ:
        _PYTHONPATH = os.environ.get("PYTHONPATH").split(os.pathsep)
    for p in os.sys.path:
        conditions = [p not in _PYTHONPATH]
        if "windows" not in get_systype():
            conditions.append(
                isdir(join(p, "click")) or isdir(join(p, "platformio")))
        if all(conditions):
            _PYTHONPATH.append(p)
    os.environ['PYTHONPATH'] = os.pathsep.join(_PYTHONPATH)


def get_serial_ports(filter_hwid=False):
    try:
        from serial.tools.list_ports import comports
    except ImportError:
        raise exception.GetSerialPortsError(os.name)

    result = []
    for p, d, h in comports():
        if not p:
            continue
        if platform.system() == "Windows":
            try:
                d = unicode(d, errors="ignore")
            except TypeError:
                pass
        if not filter_hwid or "VID:PID" in h:
            result.append({"port": p, "description": d, "hwid": h})

    if filter_hwid:
        return result

    # fix for PySerial
    if not result and platform.system() == "Darwin":
        for p in glob("/dev/tty.*"):
            result.append({"port": p, "description": "n/a", "hwid": "n/a"})
    return result


# Backward compatibility for PIO Core <3.5
get_serialports = get_serial_ports


def get_logical_devices():
    items = []
    if platform.system() == "Windows":
        try:
            result = exec_command(
                ["wmic", "logicaldisk", "get", "name,VolumeName"]).get(
                    "out", "")
            devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
            for line in result.split("\n"):
                match = devicenamere.match(line.strip())
                if not match:
                    continue
                items.append({
                    "path": match.group(1) + "\\",
                    "name": match.group(2)
                })
            return items
        except WindowsError:  # pylint: disable=undefined-variable
            pass
        # try "fsutil"
        result = exec_command(["fsutil", "fsinfo", "drives"]).get("out", "")
        for device in re.findall(r"[A-Z]:\\", result):
            items.append({"path": device, "name": None})
        return items
    else:
        result = exec_command(["df"]).get("out")
        devicenamere = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
        for line in result.split("\n"):
            match = devicenamere.match(line.strip())
            if not match:
                continue
            items.append({
                "path": match.group(1),
                "name": basename(match.group(1))
            })
        return items


def get_mdns_services():
    try:
        import zeroconf
    except ImportError:
        from site import addsitedir
        from platformio.managers.core import get_core_package_dir
        contrib_pysite_dir = get_core_package_dir("contrib-pysite")
        addsitedir(contrib_pysite_dir)
        sys.path.insert(0, contrib_pysite_dir)
        import zeroconf

    class mDNSListener(object):

        def __init__(self):
            self._zc = zeroconf.Zeroconf(
                interfaces=zeroconf.InterfaceChoice.All)
            self._found_types = []
            self._found_services = []

        def __enter__(self):
            zeroconf.ServiceBrowser(self._zc, "_services._dns-sd._udp.local.",
                                    self)
            return self

        def __exit__(self, etype, value, traceback):
            self._zc.close()

        def remove_service(self, zc, type_, name):
            pass

        def add_service(self, zc, type_, name):
            try:
                assert zeroconf.service_type_name(name)
                assert str(name)
            except (AssertionError, UnicodeError,
                    zeroconf.BadTypeInNameException):
                return
            if name not in self._found_types:
                self._found_types.append(name)
                zeroconf.ServiceBrowser(self._zc, name, self)
            if type_ in self._found_types:
                s = zc.get_service_info(type_, name)
                if s:
                    self._found_services.append(s)

        def get_services(self):
            return self._found_services

    items = []
    with mDNSListener() as mdns:
        sleep(3)
        for service in mdns.get_services():
            items.append({
                "type": service.type,
                "name": service.name,
                "ip": ".".join([str(ord(c)) for c in service.address]),
                "port": service.port,
                "properties": service.properties
            })
    return items


def get_request_defheaders():
    data = (__version__, int(is_ci()), requests.utils.default_user_agent())
    return {"User-Agent": "PlatformIO/%s CI/%d %s" % data}


@memoized
def _api_request_session():
    return requests.Session()


@throttle(500)
def _get_api_result(
        url,  # pylint: disable=too-many-branches
        params=None,
        data=None,
        auth=None):
    from platformio.app import get_setting

    result = None
    r = None
    verify_ssl = sys.version_info >= (2, 7, 9)

    headers = get_request_defheaders()
    if not url.startswith("http"):
        url = __apiurl__ + url
        if not get_setting("enable_ssl"):
            url = url.replace("https://", "http://")

    try:
        if data:
            r = _api_request_session().post(
                url,
                params=params,
                data=data,
                headers=headers,
                auth=auth,
                verify=verify_ssl)
        else:
            r = _api_request_session().get(
                url,
                params=params,
                headers=headers,
                auth=auth,
                verify=verify_ssl)
        result = r.json()
        r.raise_for_status()
    except requests.exceptions.HTTPError as e:
        if result and "message" in result:
            raise exception.APIRequestError(result['message'])
        elif result and "errors" in result:
            raise exception.APIRequestError(result['errors'][0]['title'])
        else:
            raise exception.APIRequestError(e)
    except ValueError:
        raise exception.APIRequestError(
            "Invalid response: %s" % r.text.encode("utf-8"))
    finally:
        if r:
            r.close()
    return result


def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
    internet_on(raise_exception=True)
    from platformio.app import ContentCache
    total = 0
    max_retries = 5
    cache_key = (ContentCache.key_from_args(url, params, data, auth)
                 if cache_valid else None)
    while total < max_retries:
        try:
            with ContentCache() as cc:
                if cache_key:
                    result = cc.get(cache_key)
                    if result is not None:
                        return result
            result = _get_api_result(url, params, data)
            if cache_valid:
                with ContentCache() as cc:
                    cc.set(cache_key, result, cache_valid)
            return result
        except (requests.exceptions.ConnectionError,
                requests.exceptions.Timeout) as e:
            from platformio.maintenance import in_silence
            total += 1
            if not in_silence():
                click.secho(
                    "[API] ConnectionError: {0} (incremented retry: max={1}, "
                    "total={2})".format(e, max_retries, total),
                    fg="yellow")
            sleep(2 * total)

    raise exception.APIRequestError(
        "Could not connect to PlatformIO API Service. "
        "Please try later.")


PING_INTERNET_IPS = [
    "192.30.253.113",  # github.com
    "159.122.18.156",  # dl.bintray.com
    "193.222.52.25"  # dl.platformio.org
]


@memoized
def _internet_on():
    timeout = 2
    socket.setdefaulttimeout(timeout)
    for ip in PING_INTERNET_IPS:
        try:
            if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
                requests.get(
                    "http://%s" % ip, allow_redirects=False, timeout=timeout)
            else:
                socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
                    (ip, 80))
            return True
        except:  # pylint: disable=bare-except
            pass
    return False


def internet_on(raise_exception=False):
    result = _internet_on()
    if raise_exception and not result:
        raise exception.InternetIsOffline()
    return result


def get_pythonexe_path():
    return os.environ.get("PYTHONEXEPATH", normpath(sys.executable))


def where_is_program(program, envpath=None):
    env = os.environ
    if envpath:
        env['PATH'] = envpath

    # try OS's built-in commands
    try:
        result = exec_command(
            ["where" if "windows" in get_systype() else "which", program],
            env=env)
        if result['returncode'] == 0 and isfile(result['out'].strip()):
            return result['out'].strip()
    except OSError:
        pass

    # look up in $PATH
    for bin_dir in env.get("PATH", "").split(os.pathsep):
        if isfile(join(bin_dir, program)):
            return join(bin_dir, program)
        elif isfile(join(bin_dir, "%s.exe" % program)):
            return join(bin_dir, "%s.exe" % program)

    return program


def pepver_to_semver(pepver):
    return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)
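
# Illustrative note: the first PEP 440 pre/post-release tag is rewritten into
# a semver-style pre-release, e.g.
#   pepver_to_semver("3.5.0b1")    -> "3.5.0-b.1"
#   pepver_to_semver("3.5.0.dev4") -> "3.5.0-dev.4"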


def rmtree_(path):

    def _onerror(_, name, __):
        try:
            os.chmod(name, stat.S_IWRITE)
            os.remove(name)
        except Exception as e:  # pylint: disable=broad-except
            click.secho(
                "Please manually remove file `%s`" % name, fg="red", err=True)
            raise e

    return rmtree(path, onerror=_onerror)


#
# Glob.Escape from Python 3.4
# https://github.com/python/cpython/blob/master/Lib/glob.py#L161
#

try:
    from glob import escape as glob_escape  # pylint: disable=unused-import
except ImportError:
    magic_check = re.compile('([*?[])')
    magic_check_bytes = re.compile(b'([*?[])')

    def glob_escape(pathname):
        """Escape all special characters.
        """
        # Escaping is done by wrapping any of "*?[" between square brackets.
        # Metacharacters do not work in the drive part and shouldn't be
        # escaped.
        drive, pathname = os.path.splitdrive(pathname)
        if isinstance(pathname, bytes):
            pathname = magic_check_bytes.sub(br'[\1]', pathname)
        else:
            pathname = magic_check.sub(r'[\1]', pathname)
        return drive + pathname