2014-05-18 23:38:59 +03:00
|
|
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
|
|
|
|
# See LICENSE for details.
|
|
|
|
|
2014-12-27 23:59:20 +02:00
|
|
|
import json
|
2015-02-15 23:48:04 +02:00
|
|
|
import os
|
2015-03-09 12:27:54 +02:00
|
|
|
import re
|
2015-02-15 23:48:04 +02:00
|
|
|
import subprocess
|
2015-03-09 12:27:54 +02:00
|
|
|
from os.path import (abspath, basename, dirname, expanduser, isdir, isfile,
|
|
|
|
join, realpath)
|
2014-07-31 16:20:31 +03:00
|
|
|
from platform import system, uname
|
2015-02-15 23:48:04 +02:00
|
|
|
from threading import Thread
|
2014-06-07 13:34:31 +03:00
|
|
|
|
2014-11-22 23:55:17 +02:00
|
|
|
import requests
|
2014-05-18 23:38:59 +03:00
|
|
|
|
2015-02-13 23:01:01 +02:00
|
|
|
from platformio import __apiurl__, __version__, exception
|
2014-07-27 22:35:40 +03:00
|
|
|
|
2014-05-18 23:38:59 +03:00
|
|
|
try:
|
|
|
|
from configparser import ConfigParser
|
|
|
|
except ImportError:
|
2014-06-07 13:34:31 +03:00
|
|
|
from ConfigParser import ConfigParser
|
2014-05-18 23:38:59 +03:00
|
|
|
|
|
|
|
|
2015-02-15 23:48:04 +02:00
|
|
|
class AsyncPipe(Thread):

    """Background thread that reads lines from an OS pipe as they arrive.

    The write end of the pipe (exposed via ``fileno()``) can be handed to
    ``subprocess.Popen`` as ``stdout``/``stderr``; each complete line is
    stripped, buffered, and either forwarded to ``outcallback`` or printed.
    """

    def __init__(self, outcallback=None):
        Thread.__init__(self)
        self.outcallback = outcallback

        self._fd_read, self._fd_write = os.pipe()
        self._pipe_reader = os.fdopen(self._fd_read)
        self._buffer = []

        # start reading immediately; run() blocks until the write end closes
        self.start()

    def get_buffer(self):
        # all lines captured so far (without trailing newlines)
        return self._buffer

    def fileno(self):
        # lets this object be passed wherever a file descriptor is expected
        return self._fd_write

    def run(self):
        # readline returns "" only at EOF, i.e. after close() shuts the
        # write end of the pipe
        for line in iter(self._pipe_reader.readline, ""):
            line = line.strip()
            self._buffer.append(line)
            if self.outcallback:
                self.outcallback(line)
            else:
                # FIX: the statement form `print line` is a SyntaxError on
                # Python 3; the call form behaves identically on Python 2
                print(line)
        self._pipe_reader.close()

    def close(self):
        # closing the write end delivers EOF to the reader loop, then wait
        # for the thread to drain remaining output
        os.close(self._fd_write)
        self.join()
|
|
|
|
|
|
|
|
|
2014-07-31 16:20:31 +03:00
|
|
|
def get_systype():
    """Return a lowercase ``<system>_<machine>`` tag, e.g. ``linux_x86_64``."""
    info = uname()
    return "{0}_{1}".format(info[0], info[4]).lower()
|
2014-06-12 23:29:47 +03:00
|
|
|
|
|
|
|
|
2015-03-13 17:54:24 +02:00
|
|
|
def pioversion_to_intstr():
    """Return up to the first three numeric version components as ints.

    Any pre-release/build suffix after the leading dotted digits of
    ``__version__`` is ignored.
    """
    matched = re.match(r"^([\d\.]+)", __version__)
    assert matched
    return list(map(int, matched.group(1).split(".")[:3]))
|
|
|
|
|
|
|
|
|
2015-03-05 01:36:31 +02:00
|
|
|
def _get_projconf_option_dir(name, default=None):
    """Resolve a directory option, preferring the environment over the config.

    Lookup order: the ``PLATFORMIO_<NAME>`` environment variable, then the
    ``[platformio]`` section of the project config (with ``~`` expansion and
    normalization to an absolute path); falls back to ``default``.
    """
    env_key = "PLATFORMIO_%s" % name.upper()
    if env_key in os.environ:
        return os.getenv(env_key)

    try:
        config = get_project_config()
        has_value = (config.has_section("platformio") and
                     config.has_option("platformio", name))
        if has_value:
            option_dir = config.get("platformio", name)
            # expand a leading "~" to the user's home directory
            if option_dir.startswith("~"):
                option_dir = expanduser(option_dir)
            return abspath(option_dir)
    except exception.NotPlatformProject:
        # not inside a PlatformIO project -> use the default
        pass
    return default
|
2015-02-22 22:24:22 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_home_dir():
    """Return (creating it if necessary) the PlatformIO home directory."""
    default_path = join(expanduser("~"), ".platformio")
    home_dir = _get_projconf_option_dir("home_dir", default_path)

    if not isdir(home_dir):
        os.makedirs(home_dir)

    assert isdir(home_dir)
    return home_dir
|
2014-05-18 23:38:59 +03:00
|
|
|
|
|
|
|
|
2014-09-04 18:58:12 +03:00
|
|
|
def get_lib_dir():
    """Return the directory where installed libraries are stored."""
    default_path = join(get_home_dir(), "lib")
    return _get_projconf_option_dir("lib_dir", default_path)
|
2014-09-04 18:58:12 +03:00
|
|
|
|
|
|
|
|
2014-05-18 23:38:59 +03:00
|
|
|
def get_source_dir():
    """Return the directory that contains this module's source file."""
    here = realpath(__file__)
    return dirname(here)
|
|
|
|
|
|
|
|
|
|
|
|
def get_project_dir():
    """The current working directory is treated as the project root."""
    return os.getcwd()
|
2014-05-18 23:38:59 +03:00
|
|
|
|
|
|
|
|
2015-02-22 22:24:22 +02:00
|
|
|
def get_projectsrc_dir():
    """Return the project's source directory ("src_dir" option, or ./src)."""
    default_path = join(get_project_dir(), "src")
    return _get_projconf_option_dir("src_dir", default_path)
|
2015-02-22 22:24:22 +02:00
|
|
|
|
|
|
|
|
2014-07-27 22:35:40 +03:00
|
|
|
def get_pioenvs_dir():
    """Return the build output directory ("envs_dir" option, or ./.pioenvs)."""
    default_path = join(get_project_dir(), ".pioenvs")
    return _get_projconf_option_dir("envs_dir", default_path)
|
2014-07-27 22:35:40 +03:00
|
|
|
|
|
|
|
|
2014-05-18 23:38:59 +03:00
|
|
|
def get_project_config():
    """Parse and return ``platformio.ini`` from the project root.

    Raises ``exception.NotPlatformProject`` when the file does not exist.
    """
    ini_path = join(get_project_dir(), "platformio.ini")
    if not isfile(ini_path):
        raise exception.NotPlatformProject(get_project_dir())
    parser = ConfigParser()
    parser.read(ini_path)
    return parser
|
|
|
|
|
2014-05-18 23:38:59 +03:00
|
|
|
|
2014-06-07 13:34:31 +03:00
|
|
|
def change_filemtime(path, time):
    """Set both the access and modification times of *path* to *time*."""
    timestamps = (time, time)
    os.utime(path, timestamps)
|
2014-06-03 21:27:36 +03:00
|
|
|
|
|
|
|
|
2015-01-29 18:54:28 +02:00
|
|
|
def exec_command(*args, **kwargs):
    """Run a subprocess and capture its output.

    Accepts the same arguments as ``subprocess.Popen``.  ``stdout`` and
    ``stderr`` default to pipes, but callers may pass ``AsyncPipe``
    instances to stream output line-by-line while it is still collected.

    Returns a dict with "out", "err" and "returncode" keys.
    Raises ``exception.AbortedByUser`` on Ctrl-C.
    """
    result = {
        "out": None,
        "err": None,
        "returncode": None
    }

    default = dict(
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        # Windows needs shell=True so built-in commands resolve via cmd.exe
        shell=system() == "Windows"
    )
    default.update(kwargs)
    kwargs = default

    p = subprocess.Popen(*args, **kwargs)
    try:
        result['out'], result['err'] = p.communicate()
        result['returncode'] = p.returncode
    except KeyboardInterrupt:
        raise exception.AbortedByUser()
    finally:
        # always close AsyncPipe write ends so their reader threads exit
        for s in ("stdout", "stderr"):
            if isinstance(kwargs[s], AsyncPipe):
                kwargs[s].close()

    # for AsyncPipe streams, communicate() returned None; rebuild the
    # captured output from the pipe's line buffer
    for s in ("stdout", "stderr"):
        if isinstance(kwargs[s], AsyncPipe):
            result[s[3:]] = "\n".join(kwargs[s].get_buffer())

    for k, v in result.items():  # .items() works on Python 2 and 3
        if v and isinstance(v, str):
            # FIX: the original called `result[k].strip()` and discarded the
            # return value, so output was never actually stripped.  `str` is
            # used instead of Py2-only `basestring`: communicate() and the
            # join above both yield `str` here.
            result[k] = v.strip()

    return result
|
2014-07-27 22:29:32 +03:00
|
|
|
|
|
|
|
|
|
|
|
def get_serialports():
    """List available serial ports as dicts with port/description/hwid keys.

    Raises ``exception.GetSerialPortsError`` on unsupported platforms.
    """
    # pick the pyserial enumerator that matches the current OS
    if os.name == "nt":
        from serial.tools.list_ports_windows import comports
    elif os.name == "posix":
        from serial.tools.list_ports_posix import comports
    else:
        raise exception.GetSerialPortsError(os.name)

    items = []
    for port, description, hwid in comports():
        items.append(
            {"port": port, "description": description, "hwid": hwid})
    return items
|
|
|
|
|
|
|
|
|
|
|
|
def get_logicaldisks():
    """Return mounted disks/volumes as ``[{"disk": ..., "name": ...}, ...]``.

    Uses ``wmic`` on Windows and ``df`` elsewhere, scraping their text
    output with a regular expression.
    """
    disks = []
    if system() == "Windows":
        output = exec_command(
            ["wmic", "logicaldisk", "get", "name,VolumeName"]).get("out")
        pattern = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
        for row in output.split("\n"):
            found = pattern.match(row.strip())
            if found:
                # group(2) (the volume label) may be absent -> None
                disks.append({"disk": found.group(1),
                              "name": found.group(2)})
    else:
        output = exec_command(["df"]).get("out")
        # capture the mount point that follows the "use%" column
        pattern = re.compile(r"\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
        for row in output.split("\n"):
            found = pattern.search(row.strip())
            if found:
                disks.append({"disk": found.group(1),
                              "name": basename(found.group(1))})
    return disks
|
2014-09-04 18:58:12 +03:00
|
|
|
|
|
|
|
|
2014-09-08 22:02:57 +03:00
|
|
|
def get_api_result(path, params=None, data=None):
    """Call the PlatformIO Registry API and return the decoded JSON body.

    POSTs when `data` is given, otherwise GETs.  All transport and HTTP
    failures are re-raised as `exception.APIRequestError`.
    """
    result = None
    r = None

    try:
        # silence urllib3 SSL warnings bundled inside requests
        requests.packages.urllib3.disable_warnings()
        headers = {"User-Agent": "PlatformIO/%s %s" % (
            __version__, requests.utils.default_user_agent())}

        if data:
            r = requests.post(__apiurl__ + path, params=params, data=data,
                              headers=headers)
        else:
            r = requests.get(__apiurl__ + path, params=params, headers=headers)
        # parse the body BEFORE raise_for_status() so that, on an HTTP
        # error, the server's JSON error payload is available below
        result = r.json()
        r.raise_for_status()
    except requests.exceptions.HTTPError as e:
        # prefer the server-provided error title when the body had one
        if result and "errors" in result:
            raise exception.APIRequestError(result['errors'][0]['title'])
        else:
            raise exception.APIRequestError(e)
    except requests.exceptions.ConnectionError:
        raise exception.APIRequestError(
            "Could not connect to PlatformIO Registry Service")
    except ValueError:
        # r.json() failed: the response body was not valid JSON
        raise exception.APIRequestError(
            "Invalid response: %s" % r.text.encode("utf-8"))
    finally:
        # release the connection whether or not the request succeeded
        if r:
            r.close()
    return result
|
2014-12-27 23:59:20 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_boards(type_=None):
    """Return board definitions: all of them, or one selected by `type_`.

    JSON files from the bundled "boards" directory and from
    ``<home>/boards`` are merged and cached on the function object.
    Raises ``exception.UnknownBoard`` for an unknown `type_`.
    """
    boards = {}
    try:
        boards = get_boards._cache  # pylint: disable=W0212
    except AttributeError:
        bdirs = [join(get_source_dir(), "boards")]
        custom_dir = join(get_home_dir(), "boards")
        if isdir(custom_dir):
            bdirs.append(custom_dir)

        for bdir in bdirs:
            for fname in os.listdir(bdir):
                if fname.endswith(".json"):
                    with open(join(bdir, fname)) as fp:
                        boards.update(json.load(fp))
        get_boards._cache = boards  # pylint: disable=W0212

    if type_ is None:
        return boards
    if type_ not in boards:
        raise exception.UnknownBoard(type_)
    return boards[type_]
|
2015-03-11 18:08:52 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_frameworks(type_=None):
    """Return framework metadata: all frameworks, or one selected by `type_`.

    Metadata is parsed from the module docstring of each framework build
    script under ``builder/scripts/frameworks``: first non-empty line is
    the name, last is the URL, everything between is the description.
    The result is cached on the function object.
    Raises ``exception.UnknownFramework`` for an unknown `type_`.
    """
    frameworks = {}

    try:
        frameworks = get_frameworks._cache  # pylint: disable=W0212
    except AttributeError:
        frameworks_path = join(
            get_source_dir(), "builder", "scripts", "frameworks")

        frameworks_list = [f[:-3] for f in os.listdir(frameworks_path)
                           if not f.startswith("__") and f.endswith(".py")]
        for _type in frameworks_list:
            script_path = join(frameworks_path, "%s.py" % _type)
            with open(script_path) as f:
                fcontent = f.read()
            # every framework script is expected to carry a docstring
            assert '"""' in fcontent
            _doc_start = fcontent.index('"""') + 3
            fdoc = fcontent[
                _doc_start:fcontent.index('"""', _doc_start)].strip()
            doclines = [line.strip() for line in fdoc.splitlines()
                        if line.strip()]
            frameworks[_type] = {
                "name": doclines[0],
                "description": " ".join(doclines[1:-1]),
                "url": doclines[-1],
                "script": script_path
            }
        get_frameworks._cache = frameworks  # pylint: disable=W0212

    if type_ is None:
        return frameworks
    else:
        if type_ not in frameworks:
            raise exception.UnknownFramework(type_)
        return frameworks[type_]
    # FIX: removed the original trailing `return frameworks`, which was
    # unreachable: both branches of the if/else above already return.
|