Mirror of https://github.com/platformio/platformio-core.git (synced 2025-07-29 17:47:14 +02:00)

Merge branch 'release/v6.1.4'
HISTORY.rst
@@ -13,6 +13,19 @@ PlatformIO Core 6

**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.**

6.1.4 (2022-08-12)
~~~~~~~~~~~~~~~~~~

* Added support for accepting the original FileNode environment in a "callback" function when using `Build Middlewares <https://docs.platformio.org/en/latest/scripting/middlewares.html>`__ (`pull #4380 <https://github.com/platformio/platformio-core/pull/4380>`_)
* Improved the device port finder when using a dual-channel UART converter (`issue #4367 <https://github.com/platformio/platformio-core/issues/4367>`_)
* Improved project dependency resolving when using the `pio project init --ide <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command
* Upgraded the build engine to SCons 4.4.0 (`release notes <https://github.com/SCons/scons/releases/tag/4.4.0>`__)
* Keep custom "unwantedRecommendations" when generating projects for VSCode (`issue #4383 <https://github.com/platformio/platformio-core/issues/4383>`_)
* Do not resolve project dependencies for the ``cleanall`` target (`issue #4344 <https://github.com/platformio/platformio-core/issues/4344>`_)
* Warn about calling "env.BuildSources" in a POST-type script (`issue #4385 <https://github.com/platformio/platformio-core/issues/4385>`_)
* Fixed an issue with escaping macros/defines for IDE integration (`issue #4360 <https://github.com/platformio/platformio-core/issues/4360>`_)
* Fixed an issue where the "cleanall" target removed dependencies from all working environments (`issue #4386 <https://github.com/platformio/platformio-core/issues/4386>`_)

6.1.3 (2022-07-18)
~~~~~~~~~~~~~~~~~~
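The Build Middlewares entry above (pull #4380) is easier to see with a script. A minimal sketch of a middleware whose callback also receives the construction environment, assuming a hypothetical extra_script.py registered in platformio.ini; the file pattern and -O3 flag are illustrative::

    # extra_script.py (hypothetical) -- PlatformIO 6.1.4 inspects the callback's
    # arity and passes the SCons environment when two arguments are declared.
    Import("env")  # provided by the PlatformIO scripting runtime

    def rebuild_with_o3(env, node):
        # Return a replacement node to recompile the file with an extra flag;
        # returning None would exclude the file from the build.
        return env.Object(node, CCFLAGS=env["CCFLAGS"] + ["-O3"])

    env.AddBuildMiddleware(rebuild_with_o3, "*/lib/external/*")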
@@ -1,7 +1,3 @@
.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
   :target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
   :alt: SWUbanner

PlatformIO Core
===============

@@ -99,3 +95,7 @@ Copyright (c) 2014-present PlatformIO <contact@platformio.org>

PlatformIO is licensed under the permissive Apache 2.0 license,
so you can use it in both commercial and personal projects with confidence.

.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
   :target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
   :alt: SWUbanner
Submodule docs updated: beb6d196ea...b38923e39b
Submodule examples updated: 7fbb0ec153...f98cb5a9be
@@ -14,7 +14,7 @@

import sys

VERSION = (6, 1, 3)
VERSION = (6, 1, 4)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"

@@ -49,7 +49,7 @@ __default_requests_timeout__ = (10, None)  # (connect, read)
__core_packages__ = {
    "contrib-piohome": "~3.4.2",
    "contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
    "tool-scons": "~4.40300.0",
    "tool-scons": "~4.40400.0",
    "tool-cppcheck": "~1.270.0",
    "tool-clangtidy": "~1.120001.0",
    "tool-pvs-studio": "~7.18.0",
@@ -31,7 +31,7 @@ from platformio.compat import IS_CYGWIN, ensure_python3
@click.option("--caller", "-c", help="Caller ID (service)")
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
@click.pass_context
def cli(ctx, force, caller, no_ansi):
def cli(ctx, force, caller, no_ansi):  # pylint: disable=unused-argument
    try:
        if (
            no_ansi
@@ -53,7 +53,7 @@ def cli(ctx, force, caller, no_ansi):
        except:  # pylint: disable=bare-except
            pass

    maintenance.on_platformio_start(ctx, force, caller)
    maintenance.on_platformio_start(ctx, caller)


@cli.result_callback()
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import getpass
import hashlib
import json
@@ -64,7 +62,6 @@ DEFAULT_SETTINGS = {

SESSION_VARS = {
    "command_ctx": None,
    "force_option": False,
    "caller_id": None,
    "custom_project_conf": None,
}
@@ -229,13 +226,7 @@ def set_session_var(name, value):


def is_disabled_progressbar():
    return any(
        [
            get_session_var("force_option"),
            proc.is_ci(),
            os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
        ]
    )
    return os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"


def get_cid():
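The simplified is_disabled_progressbar() above now consults a single environment variable. A hedged sketch of how a CI job could rely on it; the surrounding script is illustrative::

    import os

    # Disable PlatformIO progress bars in non-interactive CI logs; since 6.1.4
    # this variable is the only switch is_disabled_progressbar() checks.
    os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true"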
@@ -55,7 +55,7 @@ DEFAULT_ENV_OPTIONS = dict(
        "link",
        "piohooks",
        "pioasm",
        "platformio",
        "piobuild",
        "pioproject",
        "pioplatform",
        "piotest",
@@ -82,7 +82,6 @@ DEFAULT_ENV_OPTIONS = dict(
    PROGPATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
    PROG_PATH="$PROGPATH",  # deprecated
    PYTHONEXE=get_pythonexe_path(),
    IDE_EXTRA_DATA={},
)

# Declare command verbose messages
@@ -127,6 +126,7 @@ env.Replace(
    PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
    PROJECTDATA_DIR="$PROJECT_DATA_DIR",  # legacy for dev/platform
    PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
    BUILD_TYPE=env.GetBuildType(),
    BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
    LIBSOURCE_DIRS=[
        config.get("platformio", "lib_dir"),
@@ -23,15 +23,13 @@
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py

from __future__ import absolute_import

import itertools
import json
import os

import SCons

from platformio.builder.tools.platformio import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
from platformio.builder.tools.piobuild import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
from platformio.proc import where_is_program

# Implements the ability for SCons to emit a compilation database for the MongoDB project. See
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import SCons.Tool.asm  # pylint: disable=import-error

#
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import fnmatch
import os
import sys
@@ -86,7 +84,7 @@ def BuildProgram(env):
            )
        )

    print("Building in %s mode" % env.GetBuildType())
    print("Building in %s mode" % env["BUILD_TYPE"])

    return program

@@ -121,7 +119,7 @@ def ProcessProgramDeps(env):
    # process framework scripts
    env.BuildFrameworks(env.get("PIOFRAMEWORK"))

    if "debug" in env.GetBuildType():
    if "debug" in env["BUILD_TYPE"]:
        env.ConfigureDebugTarget()

    # remove specified flags
@@ -149,7 +147,7 @@ def ProcessProjectDeps(env):
        }
    )

    if "test" in env.GetBuildType():
    if "test" in env["BUILD_TYPE"]:
        build_files_before_nums = len(env.get("PIOBUILDFILES", []))
        plb.env.BuildSources(
            "$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
@@ -161,7 +159,7 @@ def ProcessProjectDeps(env):
        )
        env.Exit(1)

    if "test" not in env.GetBuildType() or env.GetProjectOption("test_build_src"):
    if "test" not in env["BUILD_TYPE"] or env.GetProjectOption("test_build_src"):
        plb.env.BuildSources(
            "$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER")
        )
@@ -294,7 +292,12 @@ def CollectBuildFiles(
        for callback, pattern in middlewares:
            if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
                continue
            new_node = callback(new_node)
            if callback.__code__.co_argcount == 2:
                new_node = callback(env, new_node)
            else:
                new_node = callback(new_node)
            if not new_node:
                break
        if new_node:
            new_sources.append(new_node)

@@ -338,6 +341,14 @@ def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):


def BuildSources(env, variant_dir, src_dir, src_filter=None):
    if env.get("PIOMAINPROG"):
        sys.stderr.write(
            "Error: The main program is already constructed and the inline "
            "source files are not allowed. Please use `env.BuildLibrary(...)` "
            "or PRE-type script instead."
        )
        env.Exit(1)

    nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
    DefaultEnvironment().Append(
        PIOBUILDFILES=[
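The new BuildSources guard above steers users toward PRE-type scripts. A minimal sketch of such a script, assuming a hypothetical src_extra directory and an `extra_scripts = pre:add_sources.py` entry in platformio.ini::

    # add_sources.py (hypothetical) -- runs before the main program is
    # constructed, so calling env.BuildSources() here is still allowed.
    import os

    Import("env")  # provided by the PlatformIO scripting runtime

    env.BuildSources(
        os.path.join("$BUILD_DIR", "extra"),        # variant (build output) dir
        os.path.join("$PROJECT_DIR", "src_extra"),  # extra sources to compile
    )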
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import


def AddActionWrapper(handler):
    def wraps(env, files, action):
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import atexit
import glob
import io
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import glob
import os
@@ -94,7 +93,7 @@ def dump_defines(env):
    for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
        item = item.strip()
        if item:
            defines.append(env.subst(item).replace("\\", ""))
            defines.append(env.subst(item).replace('\\"', '"'))

    # special symbol for Atmel AVR MCU
    if env["PIOPLATFORM"] == "atmelavr":
@@ -145,51 +144,39 @@ def _subst_cmd(env, cmd):
    return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])


def DumpIntegrationData(env, globalenv):
    """env here is `projenv`"""

def DumpIntegrationData(*args):
    projenv, globalenv = args[0:2]  # pylint: disable=unbalanced-tuple-unpacking
    data = {
        "env_name": env["PIOENV"],
        "libsource_dirs": [env.subst(item) for item in env.GetLibSourceDirs()],
        "defines": dump_defines(env),
        "includes": env.DumpIntegrationIncludes(),
        "cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
        "cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
        "gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
        "prog_path": env.subst("$PROG_PATH"),
        "svd_path": dump_svd_path(env),
        "compiler_type": env.GetCompilerType(),
        "env_name": globalenv["PIOENV"],
        "libsource_dirs": [
            globalenv.subst(item) for item in globalenv.GetLibSourceDirs()
        ],
        "defines": dump_defines(projenv),
        "includes": projenv.DumpIntegrationIncludes(),
        "cc_flags": _subst_cmd(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
        "cxx_flags": _subst_cmd(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
        "cc_path": where_is_program(
            globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
        ),
        "cxx_path": where_is_program(
            globalenv.subst("$CXX"), globalenv.subst("${ENV['PATH']}")
        ),
        "gdb_path": where_is_program(
            globalenv.subst("$GDB"), globalenv.subst("${ENV['PATH']}")
        ),
        "prog_path": globalenv.subst("$PROGPATH"),
        "svd_path": dump_svd_path(globalenv),
        "compiler_type": globalenv.GetCompilerType(),
        "targets": globalenv.DumpTargets(),
        "extra": dict(
            flash_images=[
                {"offset": item[0], "path": env.subst(item[1])}
                for item in env.get("FLASH_EXTRA_IMAGES", [])
                {"offset": item[0], "path": globalenv.subst(item[1])}
                for item in globalenv.get("FLASH_EXTRA_IMAGES", [])
            ]
        ),
    }
    data["extra"].update(
        env.get("INTEGRATION_EXTRA_DATA", env.get("IDE_EXTRA_DATA", {}))
    )

    env_ = env.Clone()
    # https://github.com/platformio/platformio-atom-ide/issues/34
    _new_defines = []
    for item in SCons.Defaults.processDefines(env_.get("CPPDEFINES", [])):
        item = item.replace('\\"', '"')
        if " " in item:
            _new_defines.append(item.replace(" ", "\\\\ "))
        else:
            _new_defines.append(item)
    env_.Replace(CPPDEFINES=_new_defines)

    # export C/C++ build flags
    data.update(
        {
            "cc_flags": _subst_cmd(env_, "$CFLAGS $CCFLAGS $CPPFLAGS"),
            "cxx_flags": _subst_cmd(env_, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
        }
    )

    for key in ("IDE_EXTRA_DATA", "INTEGRATION_EXTRA_DATA"):
        data["extra"].update(globalenv.get(key, {}))
    return data


@@ -198,6 +185,8 @@ def exists(_):


def generate(env):
    env["IDE_EXTRA_DATA"] = {}  # legacy support
    env["INTEGRATION_EXTRA_DATA"] = {}
    env.AddMethod(IsIntegrationDump)
    env.AddMethod(DumpIntegrationIncludes)
    env.AddMethod(DumpIntegrationData)
@@ -15,8 +15,6 @@
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# pylint: disable=assignment-from-no-return, unused-argument, too-many-lines

from __future__ import absolute_import

import hashlib
import io
import os
@@ -29,7 +27,7 @@ from SCons.Script import ARGUMENTS  # pylint: disable=import-error
from SCons.Script import DefaultEnvironment  # pylint: disable=import-error

from platformio import exception, fs
from platformio.builder.tools import platformio as piotool
from platformio.builder.tools import piobuild
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
from platformio.http import HTTPClientError, InternetIsOffline
from platformio.package.exception import (
@@ -94,7 +92,7 @@ class LibBuilderFactory:
            return ["mbed"]
        for fname in files:
            if not fs.path_endswith_ext(
                fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
                fname, piobuild.SRC_BUILD_EXT + piobuild.SRC_HEADER_EXT
            ):
                continue
            with io.open(
@@ -146,7 +144,7 @@ class LibBuilderBase:
        self._processed_search_files = []

        # pass a macro to the projenv + libs
        if "test" in env.GetBuildType():
        if "test" in env["BUILD_TYPE"]:
            self.env.Append(CPPDEFINES=["PIO_UNIT_TESTING"])

        # reset source filter, could be overridden with extra script
@@ -199,7 +197,7 @@ class LibBuilderBase:

    @property
    def src_filter(self):
        return piotool.SRC_FILTER_DEFAULT + [
        return piobuild.SRC_FILTER_DEFAULT + [
            "-<example%s>" % os.sep,
            "-<examples%s>" % os.sep,
            "-<test%s>" % os.sep,
@@ -331,7 +329,7 @@ class LibBuilderBase:
        return [
            os.path.join(self.src_dir, item)
            for item in self.env.MatchSourceFiles(
                self.src_dir, self.src_filter, piotool.SRC_BUILD_EXT
                self.src_dir, self.src_filter, piobuild.SRC_BUILD_EXT
            )
        ]

@@ -396,10 +394,10 @@ class LibBuilderBase:
            result.append(item)
            if not self.PARSE_SRC_BY_H_NAME:
                continue
            if not fs.path_endswith_ext(item_path, piotool.SRC_HEADER_EXT):
            if not fs.path_endswith_ext(item_path, piobuild.SRC_HEADER_EXT):
                continue
            item_fname = item_path[: item_path.rindex(".")]
            for ext in piotool.SRC_C_EXT + piotool.SRC_CXX_EXT:
            for ext in piobuild.SRC_C_EXT + piobuild.SRC_CXX_EXT:
                if not os.path.isfile("%s.%s" % (item_fname, ext)):
                    continue
                item_c_node = self.env.File("%s.%s" % (item_fname, ext))
@@ -560,7 +558,7 @@ class ArduinoLibBuilder(LibBuilderBase):

        src_filter = []
        is_utility = os.path.isdir(os.path.join(self.path, "utility"))
        for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
        for ext in piobuild.SRC_BUILD_EXT + piobuild.SRC_HEADER_EXT:
            # arduino ide ignores files with .asm or .ASM extensions
            if ext.lower() == "asm":
                continue
@@ -911,7 +909,7 @@ class ProjectAsLibBuilder(LibBuilderBase):

    def get_search_files(self):
        items = []
        build_type = self.env.GetBuildType()
        build_type = self.env["BUILD_TYPE"]
        # project files
        if "test" not in build_type or self.env.GetProjectOption("test_build_src"):
            items.extend(super().get_search_files())
@@ -1164,7 +1162,7 @@ def ConfigureProjectLibBuilder(env):

    project = ProjectAsLibBuilder(env, "$PROJECT_DIR")

    if "test" in env.GetBuildType():
    if "test" in env["BUILD_TYPE"]:
        project.env.ConfigureTestTarget()

    ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)  # pylint: disable=no-member
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import hashlib
import os
import re
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import os
import sys

@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import os
import sys

@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

from platformio.compat import MISSING
from platformio.project.config import ProjectConfig

@@ -14,8 +14,6 @@

# pylint: disable=too-many-locals

from __future__ import absolute_import

import json
import sys
from os import environ, makedirs, remove
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import os

from SCons.Action import Action  # pylint: disable=import-error
@@ -47,7 +45,7 @@ def PioClean(env, clean_all=False):
        fs.rmtree(path)

    build_dir = env.subst("$BUILD_DIR")
    libdeps_dir = env.subst("$PROJECT_LIBDEPS_DIR")
    libdeps_dir = env.subst(os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
    if os.path.isdir(build_dir):
        _clean_dir(build_dir)
    else:
@@ -12,11 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import os

from platformio.builder.tools import platformio as piotool
from platformio.builder.tools import piobuild
from platformio.test.result import TestSuite
from platformio.test.runners.factory import TestRunnerFactory

@@ -24,7 +22,7 @@ from platformio.test.runners.factory import TestRunnerFactory
def ConfigureTestTarget(env):
    env.Append(
        CPPDEFINES=["UNIT_TEST"],  # deprecated, use PIO_UNIT_TESTING
        PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piotool.SRC_BUILD_EXT],
        PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piobuild.SRC_BUILD_EXT],
    )
    env.Prepend(CPPPATH=["$PROJECT_TEST_DIR"])

@@ -38,7 +36,7 @@ def ConfigureTestTarget(env):
        env.Prepend(
            PIOTEST_SRC_FILTER=[
                f"+<{test_name}{os.path.sep}*.{ext}>"
                for ext in piotool.SRC_BUILD_EXT
                for ext in piobuild.SRC_BUILD_EXT
            ],
            CPPPATH=[os.path.join("$PROJECT_TEST_DIR", test_name)],
        )
@@ -14,8 +14,6 @@

# pylint: disable=unused-argument

from __future__ import absolute_import

import os
import re
import sys
@@ -26,7 +24,7 @@ from SCons.Script import ARGUMENTS  # pylint: disable=import-error
from serial import Serial, SerialException

from platformio import exception, fs
from platformio.device.finder import find_mbed_disk, find_serial_port, is_pattern_port
from platformio.device.finder import SerialPortFinder, find_mbed_disk, is_pattern_port
from platformio.device.list.util import list_serial_ports
from platformio.proc import exec_command

@@ -112,13 +110,12 @@ def AutodetectUploadPort(*args, **kwargs):
    except exception.InvalidUdevRules as exc:
        sys.stderr.write("\n%s\n\n" % exc)
    env.Replace(
        UPLOAD_PORT=find_serial_port(
            initial_port=initial_port,
        UPLOAD_PORT=SerialPortFinder(
            board_config=env.BoardConfig() if "BOARD" in env else None,
            upload_protocol=upload_protocol,
            prefer_gdb_port="blackmagic" in upload_protocol,
            verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)),
        )
        ).find(initial_port)
    )

    if env.subst("$UPLOAD_PORT"):
@@ -198,7 +198,7 @@ def print_processing_header(tool, envname, envdump):
        "Checking %s > %s (%s)"
        % (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
    )
    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns
    click.secho("-" * terminal_width, bold=True)


@@ -41,7 +41,7 @@ def cli(query, installed, json_output):  # pylint: disable=R0912
        grpboards[board["platform"]] = []
        grpboards[board["platform"]].append(board)

    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns
    for (platform, boards) in sorted(grpboards.items()):
        click.echo("")
        click.echo("Platform: ", nl=False)
@@ -19,7 +19,7 @@ import tempfile

import click

from platformio import app, fs
from platformio import fs
from platformio.exception import CIBuildEnvsEmpty
from platformio.project.commands.init import project_init_cmd, validate_boards
from platformio.project.config import ProjectConfig
@@ -84,8 +84,6 @@ def cli(  # pylint: disable=too-many-arguments, too-many-branches
        raise click.BadParameter("Missing argument 'src'")

    try:
        app.set_session_var("force_option", True)

        if not keep_build_dir and os.path.isdir(build_dir):
            fs.rmtree(build_dir)
        if not os.path.isdir(build_dir):
@@ -50,6 +50,13 @@ def isascii(text):
    return True


def is_terminal():
    try:
        return sys.stdout.isatty()
    except Exception:  # pylint: disable=broad-except
        return False


def ci_strings_are_equal(a, b):
    if a == b:
        return True
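The new compat.is_terminal() helper above is what the file downloader further below uses to pick between an interactive progress bar and coarse percentage output. A hedged sketch of the calling pattern; the messages are illustrative::

    from platformio.compat import is_terminal

    if is_terminal():
        print("render an interactive click.progressbar")
    else:
        # e.g. CI logs or redirected output; is_terminal() also swallows
        # exceptions raised by a patched or closed stdout and returns False.
        print("Downloading... 10% 20% 30%")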
@@ -14,7 +14,7 @@

from platformio.debug.config.base import DebugConfigBase
from platformio.debug.exception import DebugInvalidOptionsError
from platformio.device.finder import find_serial_port, is_pattern_port
from platformio.device.finder import SerialPortFinder, is_pattern_port


class BlackmagicDebugConfig(DebugConfigBase):
@@ -56,12 +56,11 @@ set language auto
        initial_port = DebugConfigBase.port.fget(self)
        if initial_port and not is_pattern_port(initial_port):
            return initial_port
        port = find_serial_port(
            initial_port,
        port = SerialPortFinder(
            board_config=self.board_config,
            upload_protocol=self.tool_name,
            prefer_gdb_port=True,
        )
        ).find(initial_port)
        if port:
            return port
        raise DebugInvalidOptionsError(
@@ -18,7 +18,7 @@ from fnmatch import fnmatch
import click
import serial

from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.compat import IS_WINDOWS
from platformio.device.list.util import list_logical_devices, list_serial_ports
from platformio.fs import get_platformio_udev_rules_path
from platformio.package.manager.platform import PlatformPackageManager
@@ -53,190 +53,12 @@ def parse_udev_rules_hwids(path):
    return result


def normalize_board_hwid(value):
    if isinstance(value, (list, tuple)):
        value = ("%s:%s" % (value[0], value[1])).replace("0x", "")
    return value.upper()


def is_pattern_port(port):
    if not port:
        return False
    return set(["*", "?", "[", "]"]) & set(port)


def match_serial_port(pattern):
    for item in list_serial_ports():
        if fnmatch(item["port"], pattern):
            return item["port"]
    return None


def is_serial_port_ready(port, timeout=1):
    try:
        serial.Serial(port, timeout=timeout).close()
        return True
    except:  # pylint: disable=bare-except
        pass
    return False


def find_serial_port(  # pylint: disable=too-many-arguments
    initial_port,
    board_config=None,
    upload_protocol=None,
    ensure_ready=False,
    prefer_gdb_port=False,
    timeout=2,
    verbose=False,
):
    if initial_port:
        if not is_pattern_port(initial_port):
            return initial_port
        return match_serial_port(initial_port)

    if upload_protocol and upload_protocol.startswith("blackmagic"):
        return find_blackmagic_serial_port(prefer_gdb_port, timeout)
    port = None
    if board_config and board_config.get("build.hwids", []):
        port = find_board_serial_port(board_config, timeout, verbose)
    if not port:
        port = find_known_uart_port(ensure_ready, timeout, verbose)
    if port:
        return port

    # pick the best PID:VID USB device
    best_port = None
    for item in list_serial_ports():
        if ensure_ready and not is_serial_port_ready(item["port"]):
            continue
        port = item["port"]
        if "VID:PID" in item["hwid"]:
            best_port = port
    return best_port or port


def find_blackmagic_serial_port(prefer_gdb_port=False, timeout=0):
    try:

        @retry(timeout=timeout)
        def wrapper():
            candidates = []
            for item in list_serial_ports(filter_hwid=True):
                if (
                    not any(hwid in item["hwid"].upper() for hwid in BLACK_MAGIC_HWIDS)
                    and not "Black Magic" in item["description"]
                ):
                    continue
                if (
                    IS_WINDOWS
                    and item["port"].startswith("COM")
                    and len(item["port"]) > 4
                ):
                    item["port"] = "\\\\.\\%s" % item["port"]
                candidates.append(item)

            if not candidates:
                raise retry.RetryNextException()

            for item in candidates:
                if ("GDB" if prefer_gdb_port else "UART") in item["description"]:
                    return item["port"]
            if IS_MACOS:
                # 1 - GDB, 3 - UART
                for item in candidates:
                    if item["port"].endswith("1" if prefer_gdb_port else "3"):
                        return item["port"]

            candidates = sorted(candidates, key=lambda item: item["port"])
            return (
                candidates[0]  # first port is GDB?
                if len(candidates) == 1 or prefer_gdb_port
                else candidates[1]
            )["port"]

        return wrapper()
    except retry.RetryStopException:
        pass
    return None


def find_board_serial_port(board_config, timeout=0, verbose=False):
    hwids = board_config.get("build.hwids", [])
    try:

        @retry(timeout=timeout)
        def wrapper():
            for item in list_serial_ports(filter_hwid=True):
                hwid = item["hwid"].upper()
                for board_hwid in hwids:
                    if normalize_board_hwid(board_hwid) in hwid:
                        return item["port"]
            raise retry.RetryNextException()

        return wrapper()
    except retry.RetryStopException:
        pass

    if verbose:
        click.secho(
            "TimeoutError: Could not automatically find serial port "
            "for the `%s` board based on the declared HWIDs=%s"
            % (board_config.get("name", "unknown"), hwids),
            fg="yellow",
            err=True,
        )

    return None


def find_known_uart_port(ensure_ready=False, timeout=0, verbose=False):
    known_hwids = list(BLACK_MAGIC_HWIDS)

    # load from UDEV rules
    udev_rules_path = get_platformio_udev_rules_path()
    if os.path.isfile(udev_rules_path):
        known_hwids.extend(parse_udev_rules_hwids(udev_rules_path))

    # load from installed dev-platforms
    for platform in PlatformPackageManager().get_installed():
        p = PlatformFactory.new(platform)
        for board_config in p.get_boards().values():
            for board_hwid in board_config.get("build.hwids", []):
                board_hwid = normalize_board_hwid(board_hwid)
                if board_hwid not in known_hwids:
                    known_hwids.append(board_hwid)

    try:

        @retry(timeout=timeout)
        def wrapper():
            for item in list_serial_ports(as_objects=True):
                if not item.vid or not item.pid:
                    continue
                hwid = "{:04X}:{:04X}".format(item.vid, item.pid)
                for pattern in known_hwids:
                    if fnmatch(hwid, pattern) and (
                        not ensure_ready or is_serial_port_ready(item.device)
                    ):
                        return item.device
            raise retry.RetryNextException()

        return wrapper()
    except retry.RetryStopException:
        pass

    if verbose:
        click.secho(
            "TimeoutError: Could not automatically find serial port "
            "based on the known UART bridges",
            fg="yellow",
            err=True,
        )

    return None


def find_mbed_disk(initial_port):
    msdlabels = ("mbed", "nucleo", "frdm", "microbit")
    for item in list_logical_devices():
@@ -254,3 +76,173 @@ def find_mbed_disk(initial_port):
        if item["name"] and any(l in item["name"].lower() for l in msdlabels):
            return item["path"]
    return None


def is_serial_port_ready(port, timeout=1):
    try:
        serial.Serial(port, timeout=timeout).close()
        return True
    except:  # pylint: disable=bare-except
        pass
    return False


class SerialPortFinder:
    def __init__(  # pylint: disable=too-many-arguments
        self,
        board_config=None,
        upload_protocol=None,
        ensure_ready=False,
        prefer_gdb_port=False,
        timeout=2,
        verbose=False,
    ):
        self.board_config = board_config
        self.upload_protocol = upload_protocol
        self.ensure_ready = ensure_ready
        self.prefer_gdb_port = prefer_gdb_port
        self.timeout = timeout
        self.verbose = verbose

    @staticmethod
    def normalize_board_hwid(value):
        if isinstance(value, (list, tuple)):
            value = ("%s:%s" % (value[0], value[1])).replace("0x", "")
        return value.upper()

    @staticmethod
    def match_serial_port(pattern):
        for item in list_serial_ports():
            if fnmatch(item["port"], pattern):
                return item["port"]
        return None

    @staticmethod
    def match_device_hwid(patterns):
        for item in list_serial_ports(as_objects=True):
            if not item.vid or not item.pid:
                continue
            hwid = "{:04X}:{:04X}".format(item.vid, item.pid)
            for pattern in patterns:
                if fnmatch(hwid, pattern):
                    return item
        return None

    def find(self, initial_port=None):
        if initial_port:
            if not is_pattern_port(initial_port):
                return initial_port
            return self.match_serial_port(initial_port)

        if self.upload_protocol and self.upload_protocol.startswith("blackmagic"):
            return self._find_blackmagic_port()

        device = None
        if self.board_config and self.board_config.get("build.hwids", []):
            device = self._find_board_device()
        if not device:
            device = self._find_known_device()
        if device:
            port = self._reveal_device_port(device)

        # pick the best PID:VID USB device
        best_port = None
        for item in list_serial_ports():
            if self.ensure_ready and not is_serial_port_ready(item["port"]):
                continue
            port = item["port"]
            if "VID:PID" in item["hwid"]:
                best_port = port
        return best_port or port

    def _reveal_device_port(self, device):
        candidates = []
        for item in list_serial_ports(as_objects=True):
            if item.vid == device.vid and item.pid == device.pid:
                candidates.append(item)
        if len(candidates) == 1:
            return device.device
        for item in candidates:
            if ("GDB" if self.prefer_gdb_port else "UART") in item.description:
                return item.device
        candidates = sorted(candidates, key=lambda item: item.device)
        # first port is GDB? BlackMagic, ESP-Prog
        return candidates[0 if self.prefer_gdb_port else -1].device

    def _find_blackmagic_port(self):
        device = self.match_device_hwid(BLACK_MAGIC_HWIDS)
        if not device:
            return None
        port = self._reveal_device_port(device)
        if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
            return "\\\\.\\%s" % port
        return port

    def _find_board_device(self):
        hwids = [
            self.normalize_board_hwid(hwid)
            for hwid in self.board_config.get("build.hwids", [])
        ]
        try:

            @retry(timeout=self.timeout)
            def wrapper():
                device = self.match_device_hwid(hwids)
                if device:
                    return device
                raise retry.RetryNextException()

            return wrapper()
        except retry.RetryStopException:
            pass

        if self.verbose:
            click.secho(
                "TimeoutError: Could not automatically find serial port "
                "for the `%s` board based on the declared HWIDs=%s"
                % (self.board_config.get("name", "unknown"), hwids),
                fg="yellow",
                err=True,
            )

        return None

    def _find_known_device(self):
        hwids = list(BLACK_MAGIC_HWIDS)

        # load from UDEV rules
        udev_rules_path = get_platformio_udev_rules_path()
        if os.path.isfile(udev_rules_path):
            hwids.extend(parse_udev_rules_hwids(udev_rules_path))

        # load from installed dev-platforms
        for platform in PlatformPackageManager().get_installed():
            p = PlatformFactory.new(platform)
            for board_config in p.get_boards().values():
                for board_hwid in board_config.get("build.hwids", []):
                    board_hwid = self.normalize_board_hwid(board_hwid)
                    if board_hwid not in hwids:
                        hwids.append(board_hwid)

        try:

            @retry(timeout=self.timeout)
            def wrapper():
                device = self.match_device_hwid(hwids)
                if device:
                    return device
                raise retry.RetryNextException()

            return wrapper()
        except retry.RetryStopException:
            pass

        if self.verbose:
            click.secho(
                "TimeoutError: Could not automatically find serial port "
                "based on the known UART bridges",
                fg="yellow",
                err=True,
            )

        return None
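A usage sketch of the SerialPortFinder API introduced above, mirroring how the upload, monitor, and unit-test callers below invoke it; the concrete values are illustrative::

    from platformio.device.finder import SerialPortFinder

    # Constructor arguments mirror the old find_serial_port() keywords;
    # the previously separate initial_port argument moved to find().
    port = SerialPortFinder(
        board_config=None,             # e.g. env.BoardConfig() when a board is set
        upload_protocol="blackmagic",  # illustrative protocol name
        prefer_gdb_port=True,
        timeout=2,
    ).find(initial_port=None)          # returns a port string or None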
@@ -18,7 +18,7 @@ import sys
import click

from platformio import exception, fs
from platformio.device.finder import find_serial_port
from platformio.device.finder import SerialPortFinder
from platformio.device.monitor.filters.base import register_filters
from platformio.device.monitor.terminal import get_available_filters, start_terminal
from platformio.platform.factory import PlatformFactory
@@ -124,14 +124,13 @@ def device_monitor_cmd(**options):

    options = apply_project_monitor_options(options, project_options)
    register_filters(platform=platform, options=options)
    options["port"] = find_serial_port(
        initial_port=options["port"],
    options["port"] = SerialPortFinder(
        board_config=platform.board_config(project_options.get("board"))
        if platform and project_options.get("board")
        else None,
        upload_protocol=project_options.get("upload_protocol"),
        ensure_ready=True,
    )
    ).find(initial_port=options["port"])

    if options["menu_char"] == options["exit_char"]:
        raise exception.UserSideException(
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import os
from pathlib import Path

@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import glob
import io
import os
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import io
import json
import os
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import os
import shutil
import time
@@ -32,9 +32,8 @@ from platformio.package.version import pepver_to_semver
from platformio.system.prune import calculate_unnecessary_system_data


def on_platformio_start(ctx, force, caller):
def on_platformio_start(ctx, caller):
    app.set_session_var("command_ctx", ctx)
    app.set_session_var("force_option", force)
    set_caller(caller)
    telemetry.on_command()

@@ -126,7 +125,7 @@ class Upgrader:


def after_upgrade(ctx):
    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns
    last_version = app.get_state_item("last_version", "0.0.0")
    if last_version == __version__:
        return
@@ -222,7 +221,7 @@ def check_platformio_upgrade():
    if pepver_to_semver(latest_version) <= pepver_to_semver(__version__):
        return

    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns

    click.echo("")
    click.echo("*" * terminal_width)
@@ -267,7 +266,7 @@ def check_prune_system():
    if (unnecessary_size / 1024) < threshold_mb:
        return

    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns
    click.echo()
    click.echo("*" * terminal_width)
    click.secho(
@@ -13,8 +13,7 @@
# limitations under the License.

import io
import math
from email.utils import parsedate_tz
from email.utils import parsedate
from os.path import getsize, join
from time import mktime

@@ -22,6 +21,7 @@ import click
import requests

from platformio import __default_requests_timeout__, app, fs
from platformio.compat import is_terminal
from platformio.package.exception import PackageException


@@ -72,22 +72,44 @@ class FileDownloader:

    def start(self, with_progress=True, silent=False):
        label = "Downloading"
        file_size = self.get_size()
        itercontent = self._request.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE)
        fp = open(self._destination, "wb")  # pylint: disable=consider-using-with
        try:
            if not with_progress or self.get_size() == -1:
                if not silent:
                    click.echo("%s..." % label)
                for chunk in itercontent:
                    if chunk:
            with open(self._destination, "wb") as fp:
                if file_size == -1 or not with_progress or silent:
                    if not silent:
                        click.echo(f"{label}...")
                    for chunk in itercontent:
                        fp.write(chunk)
            else:
                chunks = int(math.ceil(self.get_size() / float(io.DEFAULT_BUFFER_SIZE)))
                with click.progressbar(length=chunks, label=label) as pb:
                    for _ in pb:
                        fp.write(next(itercontent))

                elif not is_terminal():
                    click.echo(f"{label} 0%", nl=False)
                    print_percent_step = 10
                    printed_percents = 0
                    downloaded_size = 0
                    for chunk in itercontent:
                        fp.write(chunk)
                        downloaded_size += len(chunk)
                        if (downloaded_size / file_size * 100) >= (
                            printed_percents + print_percent_step
                        ):
                            printed_percents += print_percent_step
                            click.echo(f" {printed_percents}%", nl=False)
                    click.echo("")

                else:
                    with click.progressbar(
                        length=file_size,
                        iterable=itercontent,
                        label=label,
                        update_min_steps=min(
                            256 * 1024, file_size / 100
                        ),  # every 256Kb or less,
                    ) as pb:
                        for chunk in pb:
                            pb.update(len(chunk))
                            fp.write(chunk)
        finally:
            fp.close()
            self._request.close()

        if self.get_lmtime():
@@ -132,8 +154,7 @@ class FileDownloader:
        return True

    def _preserve_filemtime(self, lmdate):
        timedata = parsedate_tz(lmdate)
        lmtime = mktime(timedata[:9])
        lmtime = mktime(parsedate(lmdate))
        fs.change_filemtime(self._destination, lmtime)

    def __del__(self):
@@ -41,7 +41,7 @@ class PackageManagerRegistryMixin:
        if not package or not version:
            raise UnknownPackageError(spec.humanize())

        pkgfile = self._pick_compatible_pkg_file(version["files"]) if version else None
        pkgfile = self.pick_compatible_pkg_file(version["files"]) if version else None
        if not pkgfile:
            raise UnknownPackageError(spec.humanize())

@@ -162,7 +162,7 @@ class PackageManagerRegistryMixin:
                time.sleep(1)
        return (None, None)

    def filter_incompatible_registry_versions(self, versions, spec=None):
    def get_compatible_registry_versions(self, versions, spec=None, custom_system=None):
        assert not spec or isinstance(spec, PackageSpec)
        result = []
        for version in versions:
@@ -170,22 +170,27 @@ class PackageManagerRegistryMixin:
            if spec and spec.requirements and semver not in spec.requirements:
                continue
            if not any(
                self.is_system_compatible(f.get("system")) for f in version["files"]
                self.is_system_compatible(f.get("system"), custom_system=custom_system)
                for f in version["files"]
            ):
                continue
            result.append(version)
        return result

    def pick_best_registry_version(self, versions, spec=None):
    def pick_best_registry_version(self, versions, spec=None, custom_system=None):
        best = None
        for version in self.filter_incompatible_registry_versions(versions, spec):
        for version in self.get_compatible_registry_versions(
            versions, spec, custom_system
        ):
            semver = cast_version_to_semver(version["name"])
            if not best or (semver > cast_version_to_semver(best["name"])):
                best = version
        return best

    def _pick_compatible_pkg_file(self, version_files):
    def pick_compatible_pkg_file(self, version_files, custom_system=None):
        for item in version_files:
            if self.is_system_compatible(item.get("system")):
            if self.is_system_compatible(
                item.get("system"), custom_system=custom_system
            ):
                return item
        return None
@@ -116,10 +116,10 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods,too-many-in
        self._MEMORY_CACHE.clear()

    @staticmethod
    def is_system_compatible(value):
    def is_system_compatible(value, custom_system=None):
        if not value or "*" in value:
            return True
        return util.items_in_list(value, util.get_systype())
        return util.items_in_list(value, custom_system or util.get_systype())

    @staticmethod
    def ensure_dir_exists(path):
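A hedged sketch of what the custom_system parameter threaded through the registry helpers above allows; the manager instance, versions payload, and system triple are illustrative::

    # Resolve the newest registry version whose files are compatible with a
    # system other than the current host (hypothetical values shown).
    best = pkg_manager.pick_best_registry_version(
        versions,                         # registry "versions" payload
        spec=spec,                        # optional PackageSpec requirements
        custom_system=["linux_aarch64"],  # overrides util.get_systype()
    )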
@@ -276,7 +276,7 @@ class ManifestSchema(BaseSchema):
    @staticmethod
    @memoized(expire="1h")
    def load_spdx_licenses():
        version = "3.17"
        version = "3.18"
        spdx_data_url = (
            "https://raw.githubusercontent.com/spdx/license-list-data/"
            "v%s/json/licenses.json" % version
@@ -79,27 +79,32 @@ def project_init_cmd(
    elif board:
        update_board_envs(project_dir, board, project_option, env_prefix)

    # resolve project dependencies
    if not no_install_dependencies and (environment or board):
        install_project_dependencies(
            options=dict(
                project_dir=project_dir,
                environments=[environment] if environment else [],
                silent=silent,
            )
        )

    if ide:
        if not silent:
            click.echo(
                "Updating metadata for the %s IDE..." % click.style(ide, fg="cyan")
            )
        with fs.cd(project_dir):
            config = ProjectConfig.get_instance(
                os.path.join(project_dir, "platformio.ini")
            )
    with fs.cd(project_dir):
        generator = None
        config = ProjectConfig.get_instance(os.path.join(project_dir, "platformio.ini"))
        if ide:
            config.validate()
            ProjectGenerator(config, environment, ide, board).generate()
            # init generator and pick the best env if user didn't specify
            generator = ProjectGenerator(config, environment, ide, board)
            if not environment:
                environment = generator.env_name

        # resolve project dependencies
        if not no_install_dependencies and (environment or board):
            install_project_dependencies(
                options=dict(
                    project_dir=project_dir,
                    environments=[environment] if environment else [],
                    silent=silent,
                )
            )

        if generator:
            if not silent:
                click.echo(
                    "Updating metadata for the %s IDE..." % click.style(ide, fg="cyan")
                )
            generator.generate()

    if is_new_project:
        init_cvs_ignore(project_dir)
@@ -3,13 +3,20 @@
% import re
%
% recommendations = set(["platformio.platformio-ide"])
% unwantedRecommendations = set(["ms-vscode.cpptools-extension-pack"])
% previous_json = os.path.join(project_dir, ".vscode", "extensions.json")
% if os.path.isfile(previous_json):
%   fp = open(previous_json)
%   contents = re.sub(r"^\s*//.*$", "", fp.read(), flags=re.M).strip()
%   fp.close()
%   if contents:
%     recommendations |= set(json.loads(contents).get("recommendations", []))
%     try:
%       data = json.loads(contents)
%       recommendations |= set(data.get("recommendations", []))
%       unwantedRecommendations |= set(data.get("unwantedRecommendations", []))
%     except ValueError:
%       pass
%     end
%   end
% end
{
@@ -21,6 +28,8 @@
% end
    ],
    "unwantedRecommendations": [
        "ms-vscode.cpptools-extension-pack"
% for i, item in enumerate(sorted(unwantedRecommendations)):
        "{{ item }}"{{ ("," if (i + 1) < len(unwantedRecommendations) else "") }}
% end
    ]
}
@@ -228,7 +228,7 @@ def print_processing_header(env, config, verbose=False):
        "Processing %s (%s)"
        % (click.style(env, fg="cyan", bold=True), "; ".join(env_dump))
    )
    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns
    click.secho("-" * terminal_width, bold=True)


@@ -79,7 +79,7 @@ class EnvironmentProcessor:
        if "monitor" in build_targets:
            build_targets.remove("monitor")

        if "clean" not in build_targets:
        if not set(["clean", "cleanall"]) & set(build_targets):
            install_project_env_dependencies(
                self.name,
                {
@@ -183,7 +183,7 @@ def print_suite_header(test_suite):
            click.style(test_suite.env_name, fg="cyan", bold=True),
        )
    )
    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns
    click.secho("-" * terminal_width, bold=True)
@@ -17,7 +17,7 @@ from time import sleep
import click
import serial

from platformio.device.finder import find_serial_port
from platformio.device.finder import SerialPortFinder
from platformio.exception import UserSideException


@@ -66,15 +66,14 @@ class SerialTestOutputReader:
        project_options = self.test_runner.project_config.items(
            env=self.test_runner.test_suite.env_name, as_dict=True
        )
        port = find_serial_port(
            initial_port=self.test_runner.get_test_port(),
        port = SerialPortFinder(
            board_config=self.test_runner.platform.board_config(
                project_options["board"]
            ),
            upload_protocol=project_options.get("upload_protocol"),
            ensure_ready=True,
            verbose=self.test_runner.options.verbose,
        )
        ).find(initial_port=self.test_runner.get_test_port())
        if port:
            return port
        raise UserSideException(
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import functools
import math
import platform
@@ -188,7 +186,7 @@ def merge_dicts(d1, d2, path=None):


def print_labeled_bar(label, is_error=False, fg=None, sep="="):
    terminal_width, _ = shutil.get_terminal_size()
    terminal_width = shutil.get_terminal_size().columns
    width = len(click.unstyle(label))
    half_line = sep * int((terminal_width - width - 2) / 2)
    click.secho("%s %s %s" % (half_line, label, half_line), fg=fg, err=is_error)
@@ -207,4 +205,5 @@ def humanize_duration_time(duration):


def strip_ansi_codes(text):
    return re.sub(r"\x1B\[\d+(;\d+){0,2}m", "", text)
    # pylint: disable=protected-access
    return click._compat.strip_ansi(text)
@@ -39,6 +39,8 @@ RST_COPYRIGHT = """.. Copyright (c) 2014-present PlatformIO <contact@platformio
    limitations under the License.
"""

SKIP_DEBUG_TOOLS = ["esp-bridge", "esp-builtin"]

DOCS_ROOT_DIR = os.path.realpath(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "docs")
)
@@ -904,10 +906,12 @@ You can switch between debugging :ref:`debugging_tools` using
    )
    for (tool_name, tool_data) in sorted(board["debug"]["tools"].items()):
        lines.append(
            """ * - :ref:`debugging_tool_{name}`
            """ * - {tool}
      - {onboard}
      - {default}""".format(
                name=tool_name,
                tool=f"``{tool_name}``"
                if tool_name in SKIP_DEBUG_TOOLS
                else f":ref:`debugging_tool_{tool_name}`",
                onboard="Yes" if tool_data.get("onboard") else "",
                default="Yes" if tool_name == default_debug_tool else "",
            )
@@ -984,7 +988,10 @@ Boards
    for tool, platforms in tool_to_platforms.items():
        tool_path = os.path.join(DOCS_ROOT_DIR, "plus", "debug-tools", "%s.rst" % tool)
        if not os.path.isfile(tool_path):
            click.secho("Unknown debug tool `%s`" % tool, fg="red")
            if tool in SKIP_DEBUG_TOOLS:
                click.secho("Skipped debug tool `%s`" % tool, fg="yellow")
            else:
                click.secho("Unknown debug tool `%s`" % tool, fg="red")
            continue
        platforms = sorted(set(platforms))
@@ -18,6 +18,7 @@ import logging
import os
import time
from pathlib import Path
from random import random

import pytest
import semantic_version
@@ -41,11 +42,11 @@ def test_download(isolated_pio_core):
    lm.set_log_level(logging.ERROR)
    archive_path = lm.download(url, checksum)
    assert fs.calculate_file_hashsum("sha256", archive_path) == checksum
    lm.cleanup_expired_downloads(time.time())
    lm.cleanup_expired_downloads(random())
    assert os.path.isfile(archive_path)
    # test outdated downloads
    lm.set_download_utime(archive_path, time.time() - lm.DOWNLOAD_CACHE_EXPIRE - 1)
    lm.cleanup_expired_downloads(time.time())
    lm.cleanup_expired_downloads(random())
    assert not os.path.isfile(archive_path)
    # check that key is deleted from DB
    with open(lm.get_download_usagedb_path(), encoding="utf8") as fp: