# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=no-member, no-self-use, unused-argument
# pylint: disable=too-many-instance-attributes, too-many-public-methods

from __future__ import absolute_import

import hashlib
import os
import sys
from glob import glob
from os.path import (basename, commonprefix, dirname, isdir, isfile, join,
                     realpath, sep)

import SCons.Scanner
from SCons.Script import ARGUMENTS, COMMAND_LINE_TARGETS, DefaultEnvironment

from platformio import exception, util
from platformio.builder.tools import platformio as piotool
from platformio.managers.lib import LibraryManager
from platformio.managers.package import PackageManager


class LibBuilderFactory(object):

    @staticmethod
    def new(env, path, verbose=False):
        clsname = "UnknownLibBuilder"
        if isfile(join(path, "library.json")):
            clsname = "PlatformIOLibBuilder"
        else:
            used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
            common_frameworks = (
                set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks))
            if common_frameworks:
                clsname = "%sLibBuilder" % list(common_frameworks)[0].title()
            elif used_frameworks:
                clsname = "%sLibBuilder" % used_frameworks[0].title()

        obj = getattr(sys.modules[__name__], clsname)(
            env, path, verbose=verbose)
        assert isinstance(obj, LibBuilderBase)
        return obj

    @staticmethod
    def get_used_frameworks(env, path):
        if any(
                isfile(join(path, fname))
                for fname in ("library.properties", "keywords.txt")):
            return ["arduino"]

        if isfile(join(path, "module.json")):
            return ["mbed"]

        # check source files
        for root, _, files in os.walk(path, followlinks=True):
            for fname in files:
                if not env.IsFileWithExt(
                        fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT):
                    continue
                with open(join(root, fname)) as f:
                    content = f.read()
                    if "Arduino.h" in content:
                        return ["arduino"]
                    elif "mbed.h" in content:
                        return ["mbed"]
        return []


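# Illustrative selection logic of LibBuilderFactory.new() above (the path is
# hypothetical):
#
#   lb = LibBuilderFactory.new(env, "/path/to/SomeLib")
#   # library.json                        -> PlatformIOLibBuilder
#   # library.properties or keywords.txt  -> ArduinoLibBuilder
#   # module.json                         -> MbedLibBuilder
#   # otherwise the sources are scanned for "Arduino.h" / "mbed.h";
#   # if nothing matches                  -> UnknownLibBuilder
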
class LibBuilderBase(object):

    IS_WINDOWS = "windows" in util.get_systype()

    LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"]
    LDF_MODE_DEFAULT = "chain"

    COMPAT_MODES = ["off", "soft", "strict"]
    COMPAT_MODE_DEFAULT = "soft"

    CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
    CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
    # Max depth of nested includes:
    #  -1 - unlimited
    #   0 - nesting disabled
    #  >0 - maximum number of allowed nested includes
    CCONDITIONAL_SCANNER_DEPTH = 99
    PARSE_SRC_BY_H_NAME = True

    _INCLUDE_DIRS_CACHE = None

    def __init__(self, env, path, manifest=None, verbose=False):
        self.env = env.Clone()
        self.envorigin = env.Clone()
        self.path = realpath(env.subst(path))
        self.verbose = verbose

        self._manifest = manifest if manifest else self.load_manifest()
        self._is_dependent = False
        self._is_built = False
        self._depbuilders = list()
        self._circular_deps = list()
        self._processed_files = list()

        # reset source filter; it can be overridden by an extra script
        self.env['SRC_FILTER'] = ""

        # process extra options and append them to the build environment
        self.process_extra_options()

    def __repr__(self):
        return "%s(%r)" % (self.__class__, self.path)

    def __contains__(self, path):
        p1 = self.path
        p2 = path
        if self.IS_WINDOWS:
            p1 = p1.lower()
            p2 = p2.lower()
        return commonprefix((p1 + sep, p2)) == p1 + sep

    @property
    def name(self):
        return self._manifest.get("name", basename(self.path))

    @property
    def version(self):
        return self._manifest.get("version")

    @property
    def vcs_info(self):
        items = glob(join(self.path, ".*", PackageManager.SRC_MANIFEST_NAME))
        if not items:
            return None
        return util.load_json(items[0])

    @property
    def dependencies(self):
        return LibraryManager.normalize_dependencies(
            self._manifest.get("dependencies", []))

    @property
    def src_filter(self):
        return piotool.SRC_FILTER_DEFAULT + [
            "-<example%s>" % os.sep,
            "-<examples%s>" % os.sep,
            "-<test%s>" % os.sep,
            "-<tests%s>" % os.sep
        ]

    @property
    def include_dir(self):
        if not all(isdir(join(self.path, d)) for d in ("include", "src")):
            return None
        return join(self.path, "include")

    @property
    def src_dir(self):
        return (join(self.path, "src")
                if isdir(join(self.path, "src")) else self.path)

    def get_include_dirs(self):
        items = [self.src_dir]
        include_dir = self.include_dir
        if include_dir and include_dir not in items:
            items.append(include_dir)
        return items

    @property
    def build_dir(self):
        return join("$BUILD_DIR",
                    "lib%s" % hashlib.sha1(self.path).hexdigest()[:3],
                    basename(self.path))

    @property
    def build_flags(self):
        return None

    @property
    def build_unflags(self):
        return None

    @property
    def extra_script(self):
        return None

    @property
    def lib_archive(self):
        return self.env.get("LIB_ARCHIVE", "") != "false"

    @property
    def lib_ldf_mode(self):
        return self.validate_ldf_mode(
            self.env.get("LIB_LDF_MODE", self.LDF_MODE_DEFAULT))

    @property
    def lib_compat_mode(self):
        return self.validate_compat_mode(
            self.env.get("LIB_COMPAT_MODE", self.COMPAT_MODE_DEFAULT))

    @property
    def depbuilders(self):
        return self._depbuilders

    @property
    def dependent(self):
        return self._is_dependent

    @property
    def is_built(self):
        return self._is_built

    @staticmethod
    def validate_ldf_mode(mode):
        if isinstance(mode, basestring):
            mode = mode.strip().lower()
        if mode in LibBuilderBase.LDF_MODES:
            return mode
        try:
            return LibBuilderBase.LDF_MODES[int(mode)]
        except (IndexError, ValueError):
            pass
        return LibBuilderBase.LDF_MODE_DEFAULT

    @staticmethod
    def validate_compat_mode(mode):
        if isinstance(mode, basestring):
            mode = mode.strip().lower()
        if mode in LibBuilderBase.COMPAT_MODES:
            return mode
        try:
            return LibBuilderBase.COMPAT_MODES[int(mode)]
        except (IndexError, ValueError):
            pass
        return LibBuilderBase.COMPAT_MODE_DEFAULT

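    # Illustrative behavior of the validators above (derived from the code,
    # not extra functionality): both names and positional indexes are
    # accepted, e.g. validate_ldf_mode("deep+") -> "deep+" and
    # validate_ldf_mode(2) -> LDF_MODES[2] == "deep"; anything unrecognized
    # falls back to the defaults ("chain" / "soft").
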
    def is_platforms_compatible(self, platforms):
        return True

    def is_frameworks_compatible(self, frameworks):
        return True

    def load_manifest(self):
        return {}

    def process_extra_options(self):
        with util.cd(self.path):
            self.env.ProcessUnFlags(self.build_unflags)
            self.env.ProcessFlags(self.build_flags)
            if self.extra_script:
                self.env.SConscriptChdir(1)
                self.env.SConscript(
                    realpath(self.extra_script),
                    exports={
                        "env": self.env,
                        "pio_lib_builder": self
                    })

    def process_dependencies(self):
        if not self.dependencies:
            return
        for item in self.dependencies:
            skip = False
            for key in ("platforms", "frameworks"):
                env_key = "PIO" + key.upper()[:-1]
                if env_key not in self.env:
                    continue
                if (key in item and
                        not util.items_in_list(self.env[env_key], item[key])):
                    if self.verbose:
                        sys.stderr.write("Skip %s incompatible dependency %s\n"
                                         % (key[:-1], item))
                    skip = True
            if skip:
                continue

            found = False
            for lb in self.env.GetLibBuilders():
                if item['name'] != lb.name:
                    continue
                elif "frameworks" in item and \
                        not lb.is_frameworks_compatible(item["frameworks"]):
                    continue
                elif "platforms" in item and \
                        not lb.is_platforms_compatible(item["platforms"]):
                    continue
                found = True
                self.depend_recursive(lb)
                break

            if not found:
                sys.stderr.write(
                    "Error: Could not find `%s` dependency for `%s` "
                    "library\n" % (item['name'], self.name))
                self.env.Exit(1)

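    # Illustrative manifest snippet consumed by process_dependencies() above;
    # the field names mirror the code, the concrete values are hypothetical:
    #
    #   "dependencies": [
    #       {"name": "SomeLib", "platforms": "atmelavr",
    #        "frameworks": "arduino"}
    #   ]
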
    def get_search_files(self):
        items = [
            join(self.src_dir, item) for item in self.env.MatchSourceFiles(
                self.src_dir, self.src_filter)
        ]
        include_dir = self.include_dir
        if include_dir:
            items.extend([
                join(include_dir, item)
                for item in self.env.MatchSourceFiles(include_dir)
            ])
        return items

    def _validate_search_files(self, search_files=None):
        if not search_files:
            search_files = []
        assert isinstance(search_files, list)

        _search_files = []
        for path in search_files:
            if path not in self._processed_files:
                _search_files.append(path)
                self._processed_files.append(path)

        return _search_files

    def _get_found_includes(self, search_files=None):
        # all include directories
        if not LibBuilderBase._INCLUDE_DIRS_CACHE:
            LibBuilderBase._INCLUDE_DIRS_CACHE = []
            for lb in self.env.GetLibBuilders():
                LibBuilderBase._INCLUDE_DIRS_CACHE.extend(
                    [self.env.Dir(d) for d in lb.get_include_dirs()])

        # append self include directories
        include_dirs = [self.env.Dir(d) for d in self.get_include_dirs()]
        include_dirs.extend(LibBuilderBase._INCLUDE_DIRS_CACHE)

        result = []
        for path in self._validate_search_files(search_files):
            try:
                assert "+" in self.lib_ldf_mode
                candidates = LibBuilderBase.CCONDITIONAL_SCANNER(
                    self.env.File(path),
                    self.env,
                    tuple(include_dirs),
                    depth=self.CCONDITIONAL_SCANNER_DEPTH)
            except Exception as e:  # pylint: disable=broad-except
                if self.verbose and "+" in self.lib_ldf_mode:
                    sys.stderr.write(
                        "Warning! Classic Pre Processor is used for `%s`, "
                        "advanced has failed with `%s`\n" % (path, e))
                candidates = LibBuilderBase.CLASSIC_SCANNER(
                    self.env.File(path), self.env, tuple(include_dirs))

            # print(path, map(lambda n: n.get_abspath(), candidates))
            for item in candidates:
                if item not in result:
                    result.append(item)
                if not self.PARSE_SRC_BY_H_NAME:
                    continue
                _h_path = item.get_abspath()
                if not self.env.IsFileWithExt(_h_path, piotool.SRC_HEADER_EXT):
                    continue
                _f_part = _h_path[:_h_path.rindex(".")]
                for ext in piotool.SRC_C_EXT:
                    if not isfile("%s.%s" % (_f_part, ext)):
                        continue
                    _c_path = self.env.File("%s.%s" % (_f_part, ext))
                    if _c_path not in result:
                        result.append(_c_path)

        return result

    def depend_recursive(self, lb, search_files=None):

        def _already_depends(_lb):
            if self in _lb.depbuilders:
                return True
            for __lb in _lb.depbuilders:
                if _already_depends(__lb):
                    return True
            return False

        # assert isinstance(lb, LibBuilderBase)
        if self != lb:
            if _already_depends(lb):
                if self.verbose:
                    sys.stderr.write(
                        "Warning! Circular dependencies detected "
                        "between `%s` and `%s`\n" % (self.path, lb.path))
                self._circular_deps.append(lb)
            elif lb not in self._depbuilders:
                self._depbuilders.append(lb)
                LibBuilderBase._INCLUDE_DIRS_CACHE = None
        lb.search_deps_recursive(search_files)

    def search_deps_recursive(self, search_files=None):
        if not self._is_dependent:
            self._is_dependent = True
            self.process_dependencies()

            if self.lib_ldf_mode.startswith("deep"):
                search_files = self.get_search_files()

        # when LDF is disabled
        if self.lib_ldf_mode == "off":
            return

        lib_inc_map = {}
        for inc in self._get_found_includes(search_files):
            for lb in self.env.GetLibBuilders():
                if inc.get_abspath() in lb:
                    if lb not in lib_inc_map:
                        lib_inc_map[lb] = []
                    lib_inc_map[lb].append(inc.get_abspath())
                    break

        for lb, lb_search_files in lib_inc_map.items():
            self.depend_recursive(lb, lb_search_files)

    def build(self):
        libs = []
        for lb in self._depbuilders:
            libs.extend(lb.build())
            # copy shared information to self env
            for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
                self.env.PrependUnique(**{key: lb.env.get(key)})

        for lb in self._circular_deps:
            self.env.PrependUnique(CPPPATH=lb.get_include_dirs())

        if self._is_built:
            return libs
        self._is_built = True

        self.env.PrependUnique(CPPPATH=self.get_include_dirs())

        if self.lib_ldf_mode == "off":
            for lb in self.env.GetLibBuilders():
                if self == lb or not lb.is_built:
                    continue
                for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
                    self.env.PrependUnique(**{key: lb.env.get(key)})

        if self.lib_archive:
            libs.append(
                self.env.BuildLibrary(self.build_dir, self.src_dir,
                                      self.src_filter))
        else:
            self.env.BuildSources(self.build_dir, self.src_dir,
                                  self.src_filter)
        return libs


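# Descriptive summary of LibBuilderBase.build() above: dependency builders are
# built first and their CPPPATH/LIBPATH/LIBS/LINKFLAGS are merged into this
# library's environment; the library itself is then either archived via
# env.BuildLibrary() (when lib_archive is enabled) or compiled in place via
# env.BuildSources().
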
class UnknownLibBuilder(LibBuilderBase):
    pass


class ArduinoLibBuilder(LibBuilderBase):

    def load_manifest(self):
        manifest = {}
        if not isfile(join(self.path, "library.properties")):
            return manifest
        with open(join(self.path, "library.properties")) as fp:
            for line in fp.readlines():
                if "=" not in line:
                    continue
                key, value = line.split("=", 1)
                manifest[key.strip()] = value.strip()
        return manifest

    def get_include_dirs(self):
        include_dirs = LibBuilderBase.get_include_dirs(self)
        if isdir(join(self.path, "src")):
            return include_dirs
        if isdir(join(self.path, "utility")):
            include_dirs.append(join(self.path, "utility"))
        return include_dirs

    @property
    def src_filter(self):
        if isdir(join(self.path, "src")):
            return LibBuilderBase.src_filter.fget(self)
        src_filter = []
        is_utility = isdir(join(self.path, "utility"))
        for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
            src_filter.append("+<*.%s>" % ext)
            if is_utility:
                src_filter.append("+<utility%s*.%s>" % (sep, ext))
        return src_filter

    def is_frameworks_compatible(self, frameworks):
        return util.items_in_list(frameworks, ["arduino", "energia"])

    def is_platforms_compatible(self, platforms):
        platforms_map = {
            "avr": "atmelavr",
            "sam": "atmelsam",
            "samd": "atmelsam",
            "esp8266": "espressif8266",
            "esp32": "espressif32",
            "arc32": "intel_arc32",
            "stm32": "ststm32"
        }
        items = []
        for arch in self._manifest.get("architectures", "").split(","):
            arch = arch.strip()
            if arch == "*":
                items = "*"
                break
            if arch in platforms_map:
                items.append(platforms_map[arch])
        if not items:
            return LibBuilderBase.is_platforms_compatible(self, platforms)
        return util.items_in_list(platforms, items)


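# Illustrative example for the mapping above: "architectures=avr,esp8266" in
# library.properties yields ["atmelavr", "espressif8266"], while
# "architectures=*" disables platform filtering for the library.
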
class MbedLibBuilder(LibBuilderBase):

    def load_manifest(self):
        if not isfile(join(self.path, "module.json")):
            return {}
        return util.load_json(join(self.path, "module.json"))

    @property
    def include_dir(self):
        if isdir(join(self.path, "include")):
            return join(self.path, "include")
        return None

    @property
    def src_dir(self):
        if isdir(join(self.path, "source")):
            return join(self.path, "source")
        return LibBuilderBase.src_dir.fget(self)

    def get_include_dirs(self):
        include_dirs = LibBuilderBase.get_include_dirs(self)
        if self.path not in include_dirs:
            include_dirs.append(self.path)

        # library with module.json
        for p in self._manifest.get("extraIncludes", []):
            include_dirs.append(join(self.path, p))

        # old mbed library without a manifest: add all folders to CPPPATH
        if not self._manifest:
            for root, _, __ in os.walk(self.path):
                part = root.replace(self.path, "").lower()
                if any(s in part for s in ("%s." % sep, "test", "example")):
                    continue
                if root not in include_dirs:
                    include_dirs.append(root)

        return include_dirs

    def is_frameworks_compatible(self, frameworks):
        return util.items_in_list(frameworks, ["mbed"])


class PlatformIOLibBuilder(LibBuilderBase):

    def load_manifest(self):
        assert isfile(join(self.path, "library.json"))
        manifest = util.load_json(join(self.path, "library.json"))
        assert "name" in manifest

        # replace the old "espressif" dev/platform name with "espressif8266"
        if "platforms" in manifest:
            manifest['platforms'] = [
                "espressif8266" if p == "espressif" else p
                for p in util.items_to_list(manifest['platforms'])
            ]

        return manifest

    def _is_arduino_manifest(self):
        return isfile(join(self.path, "library.properties"))

    @property
    def src_filter(self):
        if "srcFilter" in self._manifest.get("build", {}):
            return self._manifest.get("build").get("srcFilter")
        elif self.env['SRC_FILTER']:
            return self.env['SRC_FILTER']
        elif self._is_arduino_manifest():
            return ArduinoLibBuilder.src_filter.fget(self)
        return LibBuilderBase.src_filter.fget(self)

    @property
    def build_flags(self):
        if "flags" in self._manifest.get("build", {}):
            return self._manifest.get("build").get("flags")
        return LibBuilderBase.build_flags.fget(self)

    @property
    def build_unflags(self):
        if "unflags" in self._manifest.get("build", {}):
            return self._manifest.get("build").get("unflags")
        return LibBuilderBase.build_unflags.fget(self)

    @property
    def extra_script(self):
        if "extraScript" in self._manifest.get("build", {}):
            return self._manifest.get("build").get("extraScript")
        return LibBuilderBase.extra_script.fget(self)

    @property
    def lib_archive(self):
        if "libArchive" in self._manifest.get("build", {}):
            return self._manifest.get("build").get("libArchive")
        return LibBuilderBase.lib_archive.fget(self)

    @property
    def lib_ldf_mode(self):
        if "libLDFMode" in self._manifest.get("build", {}):
            return self.validate_ldf_mode(
                self._manifest.get("build").get("libLDFMode"))
        return LibBuilderBase.lib_ldf_mode.fget(self)

    @property
    def lib_compat_mode(self):
        if "libCompatMode" in self._manifest.get("build", {}):
            return self.validate_compat_mode(
                self._manifest.get("build").get("libCompatMode"))
        return LibBuilderBase.lib_compat_mode.fget(self)

    def is_platforms_compatible(self, platforms):
        items = self._manifest.get("platforms")
        if not items:
            return LibBuilderBase.is_platforms_compatible(self, platforms)
        return util.items_in_list(platforms, items)

    def is_frameworks_compatible(self, frameworks):
        items = self._manifest.get("frameworks")
        if not items:
            return LibBuilderBase.is_frameworks_compatible(self, frameworks)
        return util.items_in_list(frameworks, items)

    def get_include_dirs(self):
        include_dirs = LibBuilderBase.get_include_dirs(self)

        # backwards compatibility with PlatformIO 2.0
        if ("build" not in self._manifest and self._is_arduino_manifest()
                and not isdir(join(self.path, "src"))
                and isdir(join(self.path, "utility"))):
            include_dirs.append(join(self.path, "utility"))

        for path in self.env.get("CPPPATH", []):
            if path not in self.envorigin.get("CPPPATH", []):
                include_dirs.append(self.env.subst(path))
        return include_dirs


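# Illustrative library.json "build" section covering the keys handled by
# PlatformIOLibBuilder above (the values are hypothetical):
#
#   "build": {
#       "srcFilter": "+<*> -<tests/>",
#       "flags": "-DFOO=1",
#       "unflags": "-Os",
#       "extraScript": "extra_script.py",
#       "libArchive": false,
#       "libLDFMode": "deep+",
#       "libCompatMode": "strict"
#   }
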
class ProjectAsLibBuilder(LibBuilderBase):

    def __init__(self, env, *args, **kwargs):
        # backup the original value; it will be reset in base.__init__
        project_src_filter = env.get("SRC_FILTER")
        super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
        self.env['SRC_FILTER'] = project_src_filter

    @property
    def include_dir(self):
        include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
        return include_dir if isdir(include_dir) else None

    @property
    def src_dir(self):
        return self.env.subst("$PROJECTSRC_DIR")

    def get_include_dirs(self):
        include_dirs = LibBuilderBase.get_include_dirs(self)
        project_include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
        if isdir(project_include_dir):
            include_dirs.append(project_include_dir)
        return include_dirs

    def get_search_files(self):
        # project files
        items = LibBuilderBase.get_search_files(self)
        # test files
        if "__test" in COMMAND_LINE_TARGETS:
            items.extend([
                join("$PROJECTTEST_DIR",
                     item) for item in self.env.MatchSourceFiles(
                         "$PROJECTTEST_DIR", "$PIOTEST_SRC_FILTER")
            ])
        return items

    @property
    def lib_ldf_mode(self):
        mode = LibBuilderBase.lib_ldf_mode.fget(self)
        if not mode.startswith("chain"):
            return mode
        # parse all project files
        return "deep+" if "+" in mode else "deep"

    @property
    def src_filter(self):
        return (self.env.get("SRC_FILTER")
                or LibBuilderBase.src_filter.fget(self))

    def process_extra_options(self):
        # skip for the project; options are already processed
        pass

    def process_dependencies(self):  # pylint: disable=too-many-branches
        uris = self.env.get("LIB_DEPS", [])
        if not uris:
            return
        storage_dirs = []
        for lb in self.env.GetLibBuilders():
            if dirname(lb.path) not in storage_dirs:
                storage_dirs.append(dirname(lb.path))

        for uri in uris:
            found = False
            for storage_dir in storage_dirs:
                if found:
                    break
                lm = LibraryManager(storage_dir)
                pkg_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri))
                if not pkg_dir:
                    continue
                for lb in self.env.GetLibBuilders():
                    if lb.path != pkg_dir:
                        continue
                    if lb not in self.depbuilders:
                        self.depend_recursive(lb)
                    found = True
                    break

            if not found:
                for lb in self.env.GetLibBuilders():
                    if lb.name != uri:
                        continue
                    if lb not in self.depbuilders:
                        self.depend_recursive(lb)
                    break

    def build(self):
        self._is_built = True  # do not build the project sources here
        self.env.PrependUnique(CPPPATH=self.get_include_dirs())
        return LibBuilderBase.build(self)


def GetLibBuilders(env):  # pylint: disable=too-many-branches

    if "__PIO_LIB_BUILDERS" in DefaultEnvironment():
        return sorted(
            DefaultEnvironment()['__PIO_LIB_BUILDERS'],
            key=lambda lb: 0 if lb.dependent else 1)

    items = []
    verbose = int(ARGUMENTS.get("PIOVERBOSE",
                                0)) and not env.GetOption('clean')

    def _check_lib_builder(lb):
        compat_mode = lb.lib_compat_mode
        if lb.name in env.get("LIB_IGNORE", []):
            if verbose:
                sys.stderr.write("Ignored library %s\n" % lb.path)
            return None
        if compat_mode == "strict" and not lb.is_platforms_compatible(
                env['PIOPLATFORM']):
            if verbose:
                sys.stderr.write(
                    "Platform incompatible library %s\n" % lb.path)
            return False
        if compat_mode == "soft" and "PIOFRAMEWORK" in env and \
                not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", [])):
            if verbose:
                sys.stderr.write(
                    "Framework incompatible library %s\n" % lb.path)
            return False
        return True

    found_incompat = False
    for libs_dir in env['LIBSOURCE_DIRS']:
        libs_dir = env.subst(libs_dir)
        if not isdir(libs_dir):
            continue
        for item in sorted(os.listdir(libs_dir)):
            if item == "__cores__" or not isdir(join(libs_dir, item)):
                continue
            try:
                lb = LibBuilderFactory.new(
                    env, join(libs_dir, item), verbose=verbose)
            except exception.InvalidJSONFile:
                if verbose:
                    sys.stderr.write("Skip library with broken manifest: %s\n"
                                     % join(libs_dir, item))
                continue
            if _check_lib_builder(lb):
                items.append(lb)
            else:
                found_incompat = True

    for lb in env.get("EXTRA_LIB_BUILDERS", []):
        if _check_lib_builder(lb):
            items.append(lb)
        else:
            found_incompat = True

    if verbose and found_incompat:
        sys.stderr.write(
            "More details about \"Library Compatibility Mode\": "
            "https://docs.platformio.org/page/librarymanager/ldf.html#"
            "ldf-compat-mode\n")

    DefaultEnvironment()['__PIO_LIB_BUILDERS'] = items
    return items


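# Minimal usage sketch (assuming this tool has been registered via
# generate(env) below): the first call scans LIBSOURCE_DIRS, later calls
# return the cached list.
#
#   lib_builders = env.GetLibBuilders()
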
def ConfigureProjectLibBuilder(env):

    def correct_found_libs(lib_builders):
        # build full dependency graph
        found_lbs = [lb for lb in lib_builders if lb.dependent]
        for lb in lib_builders:
            if lb in found_lbs:
                lb.search_deps_recursive(lb.get_search_files())
        for lb in lib_builders:
            for deplb in lb.depbuilders[:]:
                if deplb not in found_lbs:
                    lb.depbuilders.remove(deplb)

    def print_deps_tree(root, level=0):
        margin = "| " * (level)
        for lb in root.depbuilders:
            title = "<%s>" % lb.name
            vcs_info = lb.vcs_info
            if lb.version:
                title += " %s" % lb.version
            if vcs_info and vcs_info.get("version"):
                title += " #%s" % vcs_info.get("version")
            sys.stdout.write("%s|-- %s" % (margin, title))
            if int(ARGUMENTS.get("PIOVERBOSE", 0)):
                if vcs_info:
                    sys.stdout.write(" [%s]" % vcs_info.get("url"))
                sys.stdout.write(" (")
                sys.stdout.write(lb.path)
                sys.stdout.write(")")
            sys.stdout.write("\n")
            if lb.depbuilders:
                print_deps_tree(lb, level + 1)

    project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
    ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)

    print("Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
    print("LDF MODES: FINDER(%s) COMPATIBILITY(%s)" %
          (ldf_mode, project.lib_compat_mode))

    lib_builders = env.GetLibBuilders()
    print("Collected %d compatible libraries" % len(lib_builders))

    print("Scanning dependencies...")
    project.search_deps_recursive()

    if ldf_mode.startswith("chain") and project.depbuilders:
        correct_found_libs(lib_builders)

    if project.depbuilders:
        print("Dependency Graph")
        print_deps_tree(project)
    else:
        print("No dependencies")

    return project


def exists(_):
    return True


def generate(env):
    env.AddMethod(GetLibBuilders)
    env.AddMethod(ConfigureProjectLibBuilder)
    return env
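
# Minimal wiring sketch using the standard SCons tool hooks above; the tool
# name and toolpath below are placeholders, not taken from this file:
#
#   env = Environment(tools=["piolib"], toolpath=["<dir with this module>"])
#   project = env.ConfigureProjectLibBuilder()
#   env.Prepend(LIBS=project.build())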