# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import base64
import os
import re
from imp import load_source
from multiprocessing import cpu_count
from os.path import basename, dirname, isdir, isfile, join
from urllib import quote

import click
import semantic_version

from platformio import __version__, app, exception, util
from platformio.managers.core import get_core_package_dir
from platformio.managers.package import BasePkgManager, PackageManager


class PlatformManager(BasePkgManager):

    FILE_CACHE_VALID = None  # disable platform download caching

    def __init__(self, package_dir=None, repositories=None):
        if not repositories:
            repositories = [
                "https://dl.bintray.com/platformio/dl-platforms/manifest.json",
                "{0}://dl.platformio.org/platforms/manifest.json".format(
                    "https" if app.get_setting("enable_ssl") else "http")
            ]
        BasePkgManager.__init__(
            self, package_dir or join(util.get_home_dir(), "platforms"),
            repositories)

    @property
    def manifest_names(self):
        return ["platform.json"]

    def get_manifest_path(self, pkg_dir):
        if not isdir(pkg_dir):
            return None
        for name in self.manifest_names:
            manifest_path = join(pkg_dir, name)
            if isfile(manifest_path):
                return manifest_path
        return None

    def install(self,
                name,
                requirements=None,
                with_packages=None,
                without_packages=None,
                skip_default_package=False,
                after_update=False,
                silent=False,
                force=False,
                **_):  # pylint: disable=too-many-arguments, arguments-differ
        platform_dir = BasePkgManager.install(
            self, name, requirements, silent=silent, force=force)
        p = PlatformFactory.newPlatform(platform_dir)

        # don't cleanup packages or install them after update
        # we check packages for updates in def update()
        if after_update:
            return True

        p.install_packages(
            with_packages,
            without_packages,
            skip_default_package,
            silent=silent,
            force=force)
        return self.cleanup_packages(p.packages.keys())

    def uninstall(self, package, requirements=None, after_update=False):
        if isdir(package):
            pkg_dir = package
        else:
            name, requirements, url = self.parse_pkg_uri(package, requirements)
            pkg_dir = self.get_package_dir(name, requirements, url)

        if not pkg_dir:
            raise exception.UnknownPlatform(package)

        p = PlatformFactory.newPlatform(pkg_dir)
        BasePkgManager.uninstall(self, pkg_dir, requirements)

        # don't cleanup packages or install them after update
        # we check packages for updates in def update()
        if after_update:
            return True

        return self.cleanup_packages(p.packages.keys())

    def update(  # pylint: disable=arguments-differ
            self,
            package,
            requirements=None,
            only_check=False,
            only_packages=False):
        if isdir(package):
            pkg_dir = package
        else:
            name, requirements, url = self.parse_pkg_uri(package, requirements)
            pkg_dir = self.get_package_dir(name, requirements, url)

        if not pkg_dir:
            raise exception.UnknownPlatform(package)

        p = PlatformFactory.newPlatform(pkg_dir)
        pkgs_before = p.get_installed_packages().keys()

        missed_pkgs = set()
        if not only_packages:
            BasePkgManager.update(self, pkg_dir, requirements, only_check)
            p = PlatformFactory.newPlatform(pkg_dir)
            missed_pkgs = set(pkgs_before) & set(p.packages.keys())
            missed_pkgs -= set(p.get_installed_packages().keys())

        p.update_packages(only_check)
        self.cleanup_packages(p.packages.keys())

        if missed_pkgs:
            p.install_packages(
                with_packages=list(missed_pkgs), skip_default_package=True)

        return True

    def cleanup_packages(self, names):
        self.cache_reset()
        deppkgs = {}
        for manifest in PlatformManager().get_installed():
            p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
            for pkgname, pkgmanifest in p.get_installed_packages().items():
                if pkgname not in deppkgs:
                    deppkgs[pkgname] = set()
                deppkgs[pkgname].add(pkgmanifest['version'])

        pm = PackageManager(join(util.get_home_dir(), "packages"))
        for manifest in pm.get_installed():
            if manifest['name'] not in names:
                continue
            if (manifest['name'] not in deppkgs
                    or manifest['version'] not in deppkgs[manifest['name']]):
                try:
                    pm.uninstall(manifest['__pkg_dir'], after_update=True)
                except exception.UnknownPackage:
                    pass

        self.cache_reset()
        return True

    @util.memoized(expire=5000)
    def get_installed_boards(self):
        boards = []
        for manifest in self.get_installed():
            p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
            for config in p.get_boards().values():
                board = config.get_brief_data()
                if board not in boards:
                    boards.append(board)
        return boards

    @staticmethod
    @util.memoized()
    def get_registered_boards():
        return util.get_api_result("/boards", cache_valid="7d")

    def get_all_boards(self):
        boards = self.get_installed_boards()
        know_boards = ["%s:%s" % (b['platform'], b['id']) for b in boards]
        try:
            for board in self.get_registered_boards():
                key = "%s:%s" % (board['platform'], board['id'])
                if key not in know_boards:
                    boards.append(board)
        except (exception.APIRequestError, exception.InternetIsOffline):
            pass
        return sorted(boards, key=lambda b: b['name'])

    def board_config(self, id_, platform=None):
        for manifest in self.get_installed_boards():
            if manifest['id'] == id_ and (not platform
                                          or manifest['platform'] == platform):
                return manifest
        for manifest in self.get_registered_boards():
            if manifest['id'] == id_ and (not platform
                                          or manifest['platform'] == platform):
                return manifest
        raise exception.UnknownBoard(id_)
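

# Illustrative usage of PlatformManager (a sketch, not executed here; the
# platform/package names below are examples and assume those items are
# available in the registry):
#
#   pm = PlatformManager()
#   pm.install("atmelavr", with_packages=["tool-avrdude"])
#   pm.update("atmelavr", only_check=True)   # report outdated items only
#   pm.board_config("uno")                   # brief board manifest by ID
#
# install()/uninstall()/update() finish with cleanup_packages(), which keeps
# a downloaded package only while at least one installed platform still
# references that exact name/version.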


class PlatformFactory(object):

    @staticmethod
    def get_clsname(name):
        name = re.sub(r"[^\da-z\_]+", "", name, flags=re.I)
        return "%s%sPlatform" % (name.upper()[0], name.lower()[1:])

    @staticmethod
    def load_module(name, path):
        module = None
        try:
            module = load_source("platformio.managers.platform.%s" % name,
                                 path)
        except ImportError:
            raise exception.UnknownPlatform(name)
        return module

    @classmethod
    def newPlatform(cls, name, requirements=None):
        pm = PlatformManager()
        platform_dir = None
        if isdir(name):
            platform_dir = name
            name = pm.load_manifest(platform_dir)['name']
        elif name.endswith("platform.json") and isfile(name):
            platform_dir = dirname(name)
            name = util.load_json(name)['name']
        else:
            name, requirements, url = pm.parse_pkg_uri(name, requirements)
            platform_dir = pm.get_package_dir(name, requirements, url)
            if platform_dir:
                name = pm.load_manifest(platform_dir)['name']

        if not platform_dir:
            raise exception.UnknownPlatform(
                name if not requirements else "%s@%s" % (name, requirements))

        platform_cls = None
        if isfile(join(platform_dir, "platform.py")):
            platform_cls = getattr(
                cls.load_module(name, join(platform_dir, "platform.py")),
                cls.get_clsname(name))
        else:
            platform_cls = type(
                str(cls.get_clsname(name)), (PlatformBase, ), {})

        _instance = platform_cls(join(platform_dir, "platform.json"))
        assert isinstance(_instance, PlatformBase)
        return _instance
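

# Illustrative resolution performed by PlatformFactory.newPlatform() (a
# sketch; "atmelavr" is an example of an installed platform):
#
#   PlatformFactory.newPlatform("/path/to/platform-dir")     # by directory
#   PlatformFactory.newPlatform("/path/to/platform.json")    # by manifest
#   PlatformFactory.newPlatform("atmelavr", "~1.8.0")        # by name + req
#
# If the platform ships a "platform.py", its "<Name>Platform" class (see
# get_clsname()) is loaded from there; otherwise a bare subclass of
# PlatformBase is synthesized around "platform.json".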


class PlatformPackagesMixin(object):

    def install_packages(  # pylint: disable=too-many-arguments
            self,
            with_packages=None,
            without_packages=None,
            skip_default_package=False,
            silent=False,
            force=False):
        with_packages = set(self.find_pkg_names(with_packages or []))
        without_packages = set(self.find_pkg_names(without_packages or []))

        upkgs = with_packages | without_packages
        ppkgs = set(self.packages.keys())
        if not upkgs.issubset(ppkgs):
            raise exception.UnknownPackage(", ".join(upkgs - ppkgs))

        for name, opts in self.packages.items():
            version = opts.get("version", "")
            if name in without_packages:
                continue
            elif (name in with_packages or
                  not (skip_default_package or opts.get("optional", False))):
                if ":" in version:
                    self.pm.install(
                        "%s=%s" % (name, version), silent=silent, force=force)
                else:
                    self.pm.install(name, version, silent=silent, force=force)

        return True

    def find_pkg_names(self, candidates):
        result = []
        for candidate in candidates:
            found = False

            # lookup by package types
            for _name, _opts in self.packages.items():
                if _opts.get("type") == candidate:
                    result.append(_name)
                    found = True

            if (self.frameworks and candidate.startswith("framework-")
                    and candidate[10:] in self.frameworks):
                result.append(self.frameworks[candidate[10:]]['package'])
                found = True

            if not found:
                result.append(candidate)

        return result

    def update_packages(self, only_check=False):
        for name, manifest in self.get_installed_packages().items():
            requirements = self.packages[name].get("version", "")
            if ":" in requirements:
                _, requirements, __ = self.pm.parse_pkg_uri(requirements)
            self.pm.update(manifest['__pkg_dir'], requirements, only_check)

    def get_installed_packages(self):
        items = {}
        for name in self.packages:
            pkg_dir = self.get_package_dir(name)
            if pkg_dir:
                items[name] = self.pm.load_manifest(pkg_dir)
        return items

    def are_outdated_packages(self):
        for name, manifest in self.get_installed_packages().items():
            requirements = self.packages[name].get("version", "")
            if ":" in requirements:
                _, requirements, __ = self.pm.parse_pkg_uri(requirements)
            if self.pm.outdated(manifest['__pkg_dir'], requirements):
                return True
        return False

    def get_package_dir(self, name):
        version = self.packages[name].get("version", "")
        if ":" in version:
            return self.pm.get_package_dir(
                *self.pm.parse_pkg_uri("%s=%s" % (name, version)))
        return self.pm.get_package_dir(name, version)

    def get_package_version(self, name):
        pkg_dir = self.get_package_dir(name)
        if not pkg_dir:
            return None
        return self.pm.load_manifest(pkg_dir).get("version")
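

# Illustrative behavior of PlatformPackagesMixin.find_pkg_names() (a sketch;
# the names below assume a platform whose "platform.json" declares an
# "uploader"-type package and an "arduino" framework entry):
#
#   platform.find_pkg_names(["uploader"])
#       -> ["tool-avrdude"]              # resolved by package "type"
#   platform.find_pkg_names(["framework-arduino"])
#       -> ["framework-arduinoavr"]      # resolved via the framework map
#   platform.find_pkg_names(["toolchain-atmelavr"])
#       -> ["toolchain-atmelavr"]        # unknown names pass through as-is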


class PlatformRunMixin(object):

    LINE_ERROR_RE = re.compile(r"(^|\s+)error:?\s+", re.I)

    def run(self, variables, targets, silent, verbose):
        assert isinstance(variables, dict)
        assert isinstance(targets, list)

        self.configure_default_packages(variables, targets)
        self.install_packages(silent=True)

        self.silent = silent
        self.verbose = verbose or app.get_setting("force_verbose")

        if "clean" in targets:
            targets = ["-c", "."]

        variables['platform_manifest'] = self.manifest_path

        if "build_script" not in variables:
            variables['build_script'] = self.get_build_script()
        if not isfile(variables['build_script']):
            raise exception.BuildScriptNotFound(variables['build_script'])

        result = self._run_scons(variables, targets)
        assert "returncode" in result

        return result

    def _run_scons(self, variables, targets):
        cmd = [
            util.get_pythonexe_path(),
            join(get_core_package_dir("tool-scons"), "script", "scons"), "-Q",
            "-j %d" % self.get_job_nums(), "--warn=no-no-parallel-support",
            "-f",
            join(util.get_source_dir(), "builder", "main.py")
        ]
        cmd.append("PIOVERBOSE=%d" % (1 if self.verbose else 0))
        cmd += targets

        # encode and append variables
        for key, value in variables.items():
            cmd.append("%s=%s" % (key.upper(), base64.b64encode(value)))

        util.copy_pythonpath_to_osenv()
        result = util.exec_command(
            cmd,
            stdout=util.AsyncPipe(self.on_run_out),
            stderr=util.AsyncPipe(self.on_run_err))
        return result
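
    # Illustrative shape of the SCons command line built above (a sketch;
    # the job count and variable values are examples):
    #
    #   python .../tool-scons/script/scons -Q -j 8 \
    #       --warn=no-no-parallel-support -f .../builder/main.py \
    #       PIOVERBOSE=0 PIOENV=dGVzdA== ...
    #
    # Each build variable is upper-cased and base64-encoded so arbitrary
    # values survive the command line; the builder script is expected to
    # decode them on its side.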

    def on_run_out(self, line):
        if "`buildprog' is up to date." in line:
            return
        self._echo_line(line, level=1)

    def on_run_err(self, line):
        is_error = self.LINE_ERROR_RE.search(line) is not None
        self._echo_line(line, level=3 if is_error else 2)

        a_pos = line.find("fatal error:")
        b_pos = line.rfind(": No such file or directory")
        if a_pos == -1 or b_pos == -1:
            return
        self._echo_missed_dependency(line[a_pos + 12:b_pos].strip())

    def _echo_line(self, line, level):
        if line.startswith("scons: "):
            line = line[7:]
        assert 1 <= level <= 3
        if self.silent and (level < 2 or not line):
            return
        fg = (None, "yellow", "red")[level - 1]
        if level == 1 and "is up to date" in line:
            fg = "green"
        click.secho(line, fg=fg, err=level > 1)

    @staticmethod
    def _echo_missed_dependency(filename):
        if "/" in filename or not filename.endswith((".h", ".hpp")):
            return
        banner = """
{dots}
* Looking for {filename_styled} dependency? Check our library registry!
*
* CLI  > platformio lib search "header:{filename}"
* Web  > {link}
*
{dots}
""".format(filename=filename,
           filename_styled=click.style(filename, fg="cyan"),
           link=click.style(
               "https://platformio.org/lib/search?query=header:%s" % quote(
                   filename, safe=""),
               fg="blue"),
           dots="*" * (56 + len(filename)))
        click.echo(banner, err=True)

    @staticmethod
    def get_job_nums():
        try:
            return cpu_count()
        except NotImplementedError:
            return 1


class PlatformBase(  # pylint: disable=too-many-public-methods
        PlatformPackagesMixin, PlatformRunMixin):

    PIO_VERSION = semantic_version.Version(util.pepver_to_semver(__version__))
    _BOARDS_CACHE = {}

    def __init__(self, manifest_path):
        self._BOARDS_CACHE = {}
        self.manifest_path = manifest_path
        self._manifest = util.load_json(manifest_path)

        self.pm = PackageManager(
            join(util.get_home_dir(), "packages"), self.package_repositories)

        self.silent = False
        self.verbose = False

        if self.engines and "platformio" in self.engines:
            if self.PIO_VERSION not in semantic_version.Spec(
                    self.engines['platformio']):
                raise exception.IncompatiblePlatform(self.name,
                                                     str(self.PIO_VERSION))

    @property
    def name(self):
        return self._manifest['name']

    @property
    def title(self):
        return self._manifest['title']

    @property
    def description(self):
        return self._manifest['description']

    @property
    def version(self):
        return self._manifest['version']

    @property
    def homepage(self):
        return self._manifest.get("homepage")

    @property
    def vendor_url(self):
        return self._manifest.get("url")

    @property
    def docs_url(self):
        return self._manifest.get("docs")

    @property
    def repository_url(self):
        return self._manifest.get("repository", {}).get("url")

    @property
    def license(self):
        return self._manifest.get("license")

    @property
    def frameworks(self):
        return self._manifest.get("frameworks")

    @property
    def engines(self):
        return self._manifest.get("engines")

    @property
    def package_repositories(self):
        return self._manifest.get("packageRepositories")

    @property
    def manifest(self):
        return self._manifest

    @property
    def packages(self):
        if "packages" not in self._manifest:
            self._manifest['packages'] = {}
        return self._manifest['packages']

    def get_dir(self):
        return dirname(self.manifest_path)

    def get_build_script(self):
        main_script = join(self.get_dir(), "builder", "main.py")
        if isfile(main_script):
            return main_script
        raise NotImplementedError()

    def is_embedded(self):
        for opts in self.packages.values():
            if opts.get("type") == "uploader":
                return True
        return False

    def get_boards(self, id_=None):

        def _append_board(board_id, manifest_path):
            config = PlatformBoardConfig(manifest_path)
            if "platform" in config and config.get("platform") != self.name:
                return
            elif "platforms" in config \
                    and self.name not in config.get("platforms"):
                return
            config.manifest['platform'] = self.name
            self._BOARDS_CACHE[board_id] = config

        bdirs = [
            util.get_projectboards_dir(),
            join(util.get_home_dir(), "boards"),
            join(self.get_dir(), "boards"),
        ]

        if id_ is None:
            for boards_dir in bdirs:
                if not isdir(boards_dir):
                    continue
                for item in sorted(os.listdir(boards_dir)):
                    _id = item[:-5]
                    if not item.endswith(".json") or _id in self._BOARDS_CACHE:
                        continue
                    _append_board(_id, join(boards_dir, item))
        else:
            if id_ not in self._BOARDS_CACHE:
                for boards_dir in bdirs:
                    if not isdir(boards_dir):
                        continue
                    manifest_path = join(boards_dir, "%s.json" % id_)
                    if isfile(manifest_path):
                        _append_board(id_, manifest_path)
                        break
            if id_ not in self._BOARDS_CACHE:
                raise exception.UnknownBoard(id_)
        return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE

    def board_config(self, id_):
        return self.get_boards(id_)

    def get_package_type(self, name):
        return self.packages[name].get("type")

    def configure_default_packages(self, variables, targets):
        # enable used frameworks
        frameworks = variables.get("pioframework", [])
        if not isinstance(frameworks, list):
            frameworks = frameworks.split(", ")
        for framework in frameworks:
            if not self.frameworks:
                continue
            framework = framework.lower().strip()
            if not framework or framework not in self.frameworks:
                continue
            _pkg_name = self.frameworks[framework].get("package")
            if _pkg_name:
                self.packages[_pkg_name]['optional'] = False

        # enable upload tools for upload targets
        if any(["upload" in t for t in targets] + ["program" in targets]):
            for name, opts in self.packages.items():
                if opts.get("type") == "uploader":
                    self.packages[name]['optional'] = False
                # skip all packages in "nobuild" mode
                # allow only upload tools and frameworks
                elif "nobuild" in targets and opts.get("type") != "framework":
                    self.packages[name]['optional'] = True
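
    # Illustrative effect of configure_default_packages() (a sketch; the
    # package names are examples from a typical AVR platform manifest):
    #
    #   variables = {"pioframework": ["arduino"]}
    #   targets = ["upload"]
    #
    # would mark the framework package (e.g. "framework-arduinoavr") and any
    # "uploader"-type package (e.g. "tool-avrdude") as non-optional, so that
    # install_packages() pulls them in for this run.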

    def get_lib_storages(self):
        storages = []
        for opts in (self.frameworks or {}).values():
            if "package" not in opts:
                continue
            pkg_dir = self.get_package_dir(opts['package'])
            if not pkg_dir or not isdir(join(pkg_dir, "libraries")):
                continue
            libs_dir = join(pkg_dir, "libraries")
            storages.append({"name": opts['package'], "path": libs_dir})
            libcores_dir = join(libs_dir, "__cores__")
            if not isdir(libcores_dir):
                continue
            for item in os.listdir(libcores_dir):
                libcore_dir = join(libcores_dir, item)
                if not isdir(libcore_dir):
                    continue
                storages.append({
                    "name": "%s-core-%s" % (opts['package'], item),
                    "path": libcore_dir
                })

        return storages
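

# Illustrative shape of the data returned by PlatformBase.get_lib_storages()
# (a sketch; the paths and names are examples):
#
#   [{"name": "framework-arduinoavr",
#     "path": "<home>/packages/framework-arduinoavr/libraries"},
#    {"name": "framework-arduinoavr-core-somecore",
#     "path": "<home>/packages/framework-arduinoavr/libraries/__cores__/somecore"}]
#
# i.e. one storage per framework package plus one per "__cores__" variant.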


class PlatformBoardConfig(object):

    def __init__(self, manifest_path):
        self._id = basename(manifest_path)[:-5]
        assert isfile(manifest_path)
        self.manifest_path = manifest_path
        try:
            self._manifest = util.load_json(manifest_path)
        except ValueError:
            raise exception.InvalidBoardManifest(manifest_path)
        if not set(["name", "url", "vendor"]) <= set(self._manifest.keys()):
            raise exception.PlatformioException(
                "Please specify name, url and vendor fields for " +
                manifest_path)

    def get(self, path, default=None):
        try:
            value = self._manifest
            for k in path.split("."):
                value = value[k]
            return value
        except KeyError:
            if default is not None:
                return default
            else:
                raise KeyError("Invalid board option '%s'" % path)
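
    # Illustrative dotted-path lookups into a board manifest (a sketch; the
    # "uno.json" values are examples):
    #
    #   config = PlatformBoardConfig(".../boards/uno.json")
    #   config.get("build.mcu")                  # -> "atmega328p"
    #   config.get("upload.maximum_size", 0)     # -> 32256
    #   "debug.tools" in config                  # False if not declared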

    def update(self, path, value):
        newdict = None
        for key in path.split(".")[::-1]:
            if newdict is None:
                newdict = {key: value}
            else:
                newdict = {key: newdict}
        util.merge_dicts(self._manifest, newdict)

    def __contains__(self, key):
        try:
            self.get(key)
            return True
        except KeyError:
            return False

    @property
    def id(self):
        return self._id

    @property
    def id_(self):
        return self.id

    @property
    def manifest(self):
        return self._manifest

    def get_brief_data(self):
        return {
            "id":
            self.id,
            "name":
            self._manifest['name'],
            "platform":
            self._manifest.get("platform"),
            "mcu":
            self._manifest.get("build", {}).get("mcu", "").upper(),
            "fcpu":
            int("".join([
                c for c in str(
                    self._manifest.get("build", {}).get("f_cpu", "0L"))
                if c.isdigit()
            ])),
            "ram":
            self._manifest.get("upload", {}).get("maximum_ram_size", 0),
            "rom":
            self._manifest.get("upload", {}).get("maximum_size", 0),
            "connectivity":
            self._manifest.get("connectivity"),
            "frameworks":
            self._manifest.get("frameworks"),
            "debug":
            self.get_debug_data(),
            "vendor":
            self._manifest['vendor'],
            "url":
            self._manifest['url']
        }

    def get_debug_data(self):
        if not self._manifest.get("debug", {}).get("tools"):
            return None
        tools = {}
        for name, options in self._manifest['debug']['tools'].items():
            tools[name] = {}
            for key, value in options.items():
                if key in ("default", "onboard"):
                    tools[name][key] = value
        return {"tools": tools}

    def get_debug_tool_name(self, custom=None):
        debug_tools = self._manifest.get("debug", {}).get("tools")
        tool_name = custom
        if tool_name == "custom":
            return tool_name
        if not debug_tools:
            raise exception.DebugSupportError(self._manifest['name'])
        if tool_name:
            if tool_name in debug_tools:
                return tool_name
            raise exception.DebugInvalidOptions(
                "Unknown debug tool `%s`. Please use one of `%s` or `custom`" %
                (tool_name, ", ".join(sorted(debug_tools.keys()))))

        # automatically select best tool
        data = {"default": [], "onboard": [], "external": []}
        for key, value in debug_tools.items():
            if value.get("default"):
                data['default'].append(key)
            elif value.get("onboard"):
                data['onboard'].append(key)
            data['external'].append(key)

        for key, value in data.items():
            if not value:
                continue
            return sorted(value)[0]

        assert any(item for item in data)
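

# Illustrative selection behavior of get_debug_tool_name() (a sketch; the
# tool names are examples):
#
#   board.get_debug_tool_name()           # auto-select among declared tools,
#                                         # preferring "default"/"onboard"
#   board.get_debug_tool_name("jlink")    # accepts a declared tool by name
#   board.get_debug_tool_name("custom")   # always allowed
#
# Unknown tool names raise DebugInvalidOptions; boards without a "debug"
# section raise DebugSupportError.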