Merge branch 'develop' into feature/v7

# Conflicts:
#	platformio/__init__.py
#	platformio/commands/upgrade.py
#	platformio/http.py
#	platformio/project/options.py
#	platformio/registry/mirror.py
#	setup.py
Ivan Kravets, 2024-01-27 14:11:01 +02:00
63 changed files with 590 additions and 301 deletions

View File

@ -8,12 +8,12 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
os: [ubuntu-20.04, windows-latest, macos-latest] os: [ubuntu-20.04, windows-latest, macos-latest]
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"] python-version: ["3.6", "3.7", "3.11", "3.12"]
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
with: with:
submodules: "recursive" submodules: "recursive"
@ -37,7 +37,7 @@ jobs:
tox -e lint tox -e lint
- name: Integration Tests - name: Integration Tests
if: ${{ matrix.python-version == '3.9' }} if: ${{ matrix.python-version == '3.11' }}
run: | run: |
tox -e testcore tox -e testcore

View File

@ -12,14 +12,14 @@ jobs:
environment: production environment: production
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
with: with:
submodules: "recursive" submodules: "recursive"
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4 uses: actions/setup-python@v4
with: with:
python-version: "3.9" python-version: "3.11"
- name: Install dependencies - name: Install dependencies
run: | run: |
@ -35,7 +35,8 @@ jobs:
tox -e testcore tox -e testcore
- name: Build Python source tarball - name: Build Python source tarball
run: python setup.py sdist bdist_wheel # run: python setup.py sdist bdist_wheel
run: python setup.py sdist
- name: Publish package to PyPI - name: Publish package to PyPI
if: ${{ github.ref == 'refs/heads/master' }} if: ${{ github.ref == 'refs/heads/master' }}

View File

@ -7,13 +7,13 @@ jobs:
name: Build Docs name: Build Docs
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
with: with:
submodules: "recursive" submodules: "recursive"
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4 uses: actions/setup-python@v4
with: with:
python-version: 3.9 python-version: "3.11"
- name: Install dependencies - name: Install dependencies
run: | run: |
python -m pip install --upgrade pip python -m pip install --upgrade pip
@ -78,7 +78,7 @@ jobs:
fi fi
- name: Checkout latest Docs - name: Checkout latest Docs
continue-on-error: true continue-on-error: true
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
repository: ${{ env.DOCS_REPO }} repository: ${{ env.DOCS_REPO }}
path: ${{ env.DOCS_DIR }} path: ${{ env.DOCS_DIR }}

View File

@ -15,14 +15,14 @@ jobs:
PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy" PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
with: with:
submodules: "recursive" submodules: "recursive"
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4 uses: actions/setup-python@v4
with: with:
python-version: "3.9" python-version: "3.11"
- name: Install dependencies - name: Install dependencies
run: | run: |

View File

@ -40,20 +40,20 @@ jobs:
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
with: with:
submodules: "recursive" submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4 uses: actions/setup-python@v4
with: with:
python-version: 3.9 python-version: 3.11
- name: Install PlatformIO - name: Install PlatformIO
run: pip install -U . run: pip install -U .
- name: Check out ${{ matrix.project.repository }} - name: Check out ${{ matrix.project.repository }}
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
submodules: "recursive" submodules: "recursive"
repository: ${{ matrix.project.repository }} repository: ${{ matrix.project.repository }}

View File

@ -17,7 +17,38 @@ Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles, PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success. test-driven methodologies, and modern toolchains for unrivaled success.
6.1.11 (2023-??-??) 6.1.14 (2024-??-??)
~~~~~~~~~~~~~~~~~~~
* Broadened version support for the ``pyelftools`` dependency, enabling compatibility with lower versions and facilitating integration with a wider range of third-party tools (`issue #4834 <https://github.com/platformio/platformio-core/issues/4834>`_)
* Resolved an issue related to the relative package path in the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command
6.1.13 (2024-01-12)
~~~~~~~~~~~~~~~~~~~
* Expanded support for SCons variables declared in the legacy format ``${SCONS_VARNAME}`` (`issue #4828 <https://github.com/platformio/platformio-core/issues/4828>`_)
6.1.12 (2024-01-10)
~~~~~~~~~~~~~~~~~~~
* Added support for Python 3.12
* Introduced the capability to launch the debug server in a separate process (`issue #4722 <https://github.com/platformio/platformio-core/issues/4722>`_)
* Introduced a warning during the verification of MCU maximum RAM usage, signaling when the allocated RAM surpasses 100% (`issue #4791 <https://github.com/platformio/platformio-core/issues/4791>`_)
* Drastically enhanced the speed of project building when operating in verbose mode (`issue #4783 <https://github.com/platformio/platformio-core/issues/4783>`_)
* Upgraded the build engine to the latest version of SCons (4.6.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.6.0>`__)
* Enhanced the handling of built-in variables in |PIOCONF| during |INTERPOLATION| (`issue #4695 <https://github.com/platformio/platformio-core/issues/4695>`_)
* Enhanced PIP dependency declarations for improved reliability and extended support to include Python 3.6 (`issue #4819 <https://github.com/platformio/platformio-core/issues/4819>`_)
* Implemented automatic installation of missing dependencies when utilizing a SOCKS proxy (`issue #4822 <https://github.com/platformio/platformio-core/issues/4822>`_)
* Implemented a fail-safe mechanism to terminate a debugging session if an unknown CLI option is passed (`issue #4699 <https://github.com/platformio/platformio-core/issues/4699>`_)
* Rectified an issue where ``${platformio.name}`` erroneously represented ``None`` as the default `project name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/name.html>`__ (`issue #4717 <https://github.com/platformio/platformio-core/issues/4717>`_)
* Resolved an issue where the ``COMPILATIONDB_INCLUDE_TOOLCHAIN`` setting was not correctly applying to private libraries (`issue #4762 <https://github.com/platformio/platformio-core/issues/4762>`_)
* Resolved an issue where ``get_systype()`` inaccurately returned the architecture when executed within a Docker container on a 64-bit kernel with a 32-bit userspace (`issue #4777 <https://github.com/platformio/platformio-core/issues/4777>`_)
* Resolved an issue with incorrect handling of the ``check_src_filters`` option when used in multiple environments (`issue #4788 <https://github.com/platformio/platformio-core/issues/4788>`_)
* Resolved an issue where running `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ resulted in duplicated "include" entries (`issue #4723 <https://github.com/platformio/platformio-core/issues/4723>`_)
* Resolved an issue where native debugging failed on the host machine (`issue #4745 <https://github.com/platformio/platformio-core/issues/4745>`_)
* Resolved an issue where custom debug configurations were being inadvertently overwritten in VSCode's ``launch.json`` (`issue #4810 <https://github.com/platformio/platformio-core/issues/4810>`_)
6.1.11 (2023-08-31)
~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~
* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package * Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package

View File

@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
VERSION = (6, 1, "11a2") VERSION = (6, 1, "14a1")
__version__ = ".".join([str(s) for s in VERSION]) __version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio" __title__ = "platformio"
@ -41,7 +41,7 @@ __pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__core_packages__ = { __core_packages__ = {
"contrib-piohome": "~3.4.2", "contrib-piohome": "~3.4.2",
"contrib-pioremote": "~1.0.0", "contrib-pioremote": "~1.0.0",
"tool-scons": "~4.40502.0", "tool-scons": "~4.40600.0",
"tool-cppcheck": "~1.21100.0", "tool-cppcheck": "~1.21100.0",
"tool-clangtidy": "~1.150005.0", "tool-clangtidy": "~1.150005.0",
"tool-pvs-studio": "~7.18.0", "tool-pvs-studio": "~7.18.0",
@ -52,22 +52,3 @@ __check_internet_hosts__ = [
"88.198.170.159", # platformio.org "88.198.170.159", # platformio.org
"github.com", "github.com",
] + __registry_mirror_hosts__ ] + __registry_mirror_hosts__
__install_requires__ = [
# Core requirements
"bottle == 0.12.*",
"click >=8.0.4, <=8.2",
"colorama",
"httpx >=0.22.0, <0.25",
"marshmallow == 3.*",
"pyelftools == 0.29",
"pyserial == 3.5.*", # keep in sync "device/monitor/terminal.py"
"semantic_version == 2.10.*",
"tabulate == 0.*",
] + [
# PIO Home requirements
"ajsonrpc == 1.2.*",
"starlette >=0.19, <0.32",
"uvicorn >=0.16, <0.24",
"wsproto == 1.*",
]

View File

@ -51,11 +51,13 @@ def team_list_cmd(orgname, json_output):
table_data.append( table_data.append(
( (
"Members:", "Members:",
", ".join( (
(member.get("username") for member in team.get("members")) ", ".join(
) (member.get("username") for member in team.get("members"))
if team.get("members") )
else "-", if team.get("members")
else "-"
),
) )
) )
click.echo(tabulate(table_data, tablefmt="plain")) click.echo(tabulate(table_data, tablefmt="plain"))

View File

@ -171,3 +171,6 @@ ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID
# Atmel AVR Dragon # Atmel AVR Dragon
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1" ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
# Espressif USB JTAG/serial debug unit
ATTRS{idVendor}=="303a", ATTR{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"

View File

@ -117,6 +117,10 @@ def ProcessProgramDeps(env):
# remove specified flags # remove specified flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS")) env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
env.ProcessCompileDbToolchainOption()
def ProcessCompileDbToolchainOption(env):
if "compiledb" in COMMAND_LINE_TARGETS: if "compiledb" in COMMAND_LINE_TARGETS:
# Resolve absolute path of toolchain # Resolve absolute path of toolchain
for cmd in ("CC", "CXX", "AS"): for cmd in ("CC", "CXX", "AS"):
@ -129,6 +133,7 @@ def ProcessProgramDeps(env):
) )
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"): if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
for scope, includes in env.DumpIntegrationIncludes().items(): for scope, includes in env.DumpIntegrationIncludes().items():
if scope in ("toolchain",): if scope in ("toolchain",):
env.Append(CPPPATH=includes) env.Append(CPPPATH=includes)
@ -370,6 +375,7 @@ def generate(env):
env.AddMethod(GetBuildType) env.AddMethod(GetBuildType)
env.AddMethod(BuildProgram) env.AddMethod(BuildProgram)
env.AddMethod(ProcessProgramDeps) env.AddMethod(ProcessProgramDeps)
env.AddMethod(ProcessCompileDbToolchainOption)
env.AddMethod(ProcessProjectDeps) env.AddMethod(ProcessProjectDeps)
env.AddMethod(ParseFlagsExtended) env.AddMethod(ParseFlagsExtended)
env.AddMethod(ProcessFlags) env.AddMethod(ProcessFlags)

View File

@ -29,12 +29,7 @@ def IsIntegrationDump(_):
def DumpIntegrationIncludes(env): def DumpIntegrationIncludes(env):
result = dict(build=[], compatlib=[], toolchain=[]) result = dict(build=[], compatlib=[], toolchain=[])
result["build"].extend( # `env`(project) CPPPATH
[
env.subst("$PROJECT_INCLUDE_DIR"),
env.subst("$PROJECT_SRC_DIR"),
]
)
result["build"].extend( result["build"].extend(
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])] [os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
) )

View File

@ -477,6 +477,7 @@ class LibBuilderBase:
self.is_built = True self.is_built = True
self.env.PrependUnique(CPPPATH=self.get_include_dirs()) self.env.PrependUnique(CPPPATH=self.get_include_dirs())
self.env.ProcessCompileDbToolchainOption()
if self.lib_ldf_mode == "off": if self.lib_ldf_mode == "off":
for lb in self.env.GetLibBuilders(): for lb in self.env.GetLibBuilders():
@ -791,7 +792,9 @@ class PlatformIOLibBuilder(LibBuilderBase):
include_dirs.append(os.path.join(self.path, "utility")) include_dirs.append(os.path.join(self.path, "utility"))
for path in self.env.get("CPPPATH", []): for path in self.env.get("CPPPATH", []):
if path not in self.envorigin.get("CPPPATH", []): if path not in include_dirs and path not in self.envorigin.get(
"CPPPATH", []
):
include_dirs.append(self.env.subst(path)) include_dirs.append(self.env.subst(path))
return include_dirs return include_dirs

View File

@ -75,9 +75,11 @@ def LoadPioPlatform(env):
continue continue
env.PrependENVPath( env.PrependENVPath(
"PATH", "PATH",
os.path.join(pkg.path, "bin") (
if os.path.isdir(os.path.join(pkg.path, "bin")) os.path.join(pkg.path, "bin")
else pkg.path, if os.path.isdir(os.path.join(pkg.path, "bin"))
else pkg.path
),
) )
if ( if (
not IS_WINDOWS not IS_WINDOWS

View File

@ -218,12 +218,11 @@ def CheckUploadSize(_, target, source, env):
if int(ARGUMENTS.get("PIOVERBOSE", 0)): if int(ARGUMENTS.get("PIOVERBOSE", 0)):
print(output) print(output)
# raise error if data_max_size and data_size > data_max_size:
# if data_max_size and data_size > data_max_size: sys.stderr.write(
# sys.stderr.write( "Warning! The data size (%d bytes) is greater "
# "Error: The data size (%d bytes) is greater " "than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size)) )
# env.Exit(1)
if program_size > program_max_size: if program_size > program_max_size:
sys.stderr.write( sys.stderr.write(
"Error: The program size (%d bytes) is greater " "Error: The program size (%d bytes) is greater "

View File

@ -108,7 +108,7 @@ def cli(
"+<%s>" % os.path.basename(config.get("platformio", "include_dir")), "+<%s>" % os.path.basename(config.get("platformio", "include_dir")),
] ]
src_filters = ( env_src_filters = (
src_filters src_filters
or pattern or pattern
or env_options.get( or env_options.get(
@ -120,11 +120,13 @@ def cli(
tool_options = dict( tool_options = dict(
verbose=verbose, verbose=verbose,
silent=silent, silent=silent,
src_filters=src_filters, src_filters=env_src_filters,
flags=flags or env_options.get("check_flags"), flags=flags or env_options.get("check_flags"),
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]] severity=(
if silent [DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
else severity or config.get("env:" + envname, "check_severity"), if silent
else severity or config.get("env:" + envname, "check_severity")
),
skip_packages=skip_packages or env_options.get("check_skip_packages"), skip_packages=skip_packages or env_options.get("check_skip_packages"),
platform_packages=env_options.get("platform_packages"), platform_packages=env_options.get("platform_packages"),
) )
@ -142,9 +144,11 @@ def cli(
result = {"env": envname, "tool": tool, "duration": time()} result = {"env": envname, "tool": tool, "duration": time()}
rc = ct.check( rc = ct.check(
on_defect_callback=None on_defect_callback=(
if (json_output or verbose) None
else lambda defect: click.echo(repr(defect)) if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
)
) )
result["defects"] = ct.get_defects() result["defects"] = ct.get_defects()

View File

@ -18,9 +18,10 @@ import subprocess
import click import click
from platformio import VERSION, __install_requires__, __version__, app, exception from platformio import VERSION, __version__, app, exception
from platformio.http import fetch_http_content from platformio.http import fetch_http_content
from platformio.package.manager.core import update_core_packages from platformio.package.manager.core import update_core_packages
from platformio.pipdeps import get_pip_dependencies
from platformio.proc import get_pythonexe_path from platformio.proc import get_pythonexe_path
PYPI_JSON_URL = "https://pypi.org/pypi/platformio/json" PYPI_JSON_URL = "https://pypi.org/pypi/platformio/json"
@ -37,7 +38,7 @@ DEVELOP_INIT_SCRIPT_URL = (
@click.option("--verbose", "-v", is_flag=True) @click.option("--verbose", "-v", is_flag=True)
def cli(dev, only_dependencies, verbose): def cli(dev, only_dependencies, verbose):
if only_dependencies: if only_dependencies:
return upgrade_pypi_dependencies(verbose) return upgrade_pip_dependencies(verbose)
update_core_packages() update_core_packages()
@ -102,7 +103,7 @@ def cli(dev, only_dependencies, verbose):
return True return True
def upgrade_pypi_dependencies(verbose): def upgrade_pip_dependencies(verbose):
subprocess.run( subprocess.run(
[ [
get_pythonexe_path(), get_pythonexe_path(),
@ -111,7 +112,7 @@ def upgrade_pypi_dependencies(verbose):
"install", "install",
"--upgrade", "--upgrade",
"pip", "pip",
*__install_requires__, *get_pip_dependencies(),
], ],
check=True, check=True,
stdout=subprocess.PIPE if not verbose else None, stdout=subprocess.PIPE if not verbose else None,

View File

@ -55,7 +55,7 @@ from platformio.project.options import ProjectOptions
@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type) @click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type)
@click.option("--verbose", "-v", is_flag=True) @click.option("--verbose", "-v", is_flag=True)
@click.option("--interface", type=click.Choice(["gdb"])) @click.option("--interface", type=click.Choice(["gdb"]))
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED) @click.argument("client_extra_args", nargs=-1, type=click.UNPROCESSED)
@click.pass_context @click.pass_context
def cli( def cli(
ctx, ctx,
@ -65,10 +65,13 @@ def cli(
load_mode, load_mode,
verbose, verbose,
interface, interface,
__unprocessed, client_extra_args,
): ):
app.set_session_var("custom_project_conf", project_conf) app.set_session_var("custom_project_conf", project_conf)
if not interface and client_extra_args:
raise click.UsageError("Please specify debugging interface")
# use env variables from Eclipse or CLion # use env variables from Eclipse or CLion
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"): for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
if is_platformio_project(project_dir): if is_platformio_project(project_dir):
@ -92,7 +95,7 @@ def cli(
env_name, env_name,
load_mode, load_mode,
verbose, verbose,
__unprocessed, client_extra_args,
) )
if helpers.is_gdbmi_mode(): if helpers.is_gdbmi_mode():
os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true" os.environ["PLATFORMIO_DISABLE_PROGRESSBAR"] = "true"
@ -103,19 +106,19 @@ def cli(
else: else:
debug_config = _configure(*configure_args) debug_config = _configure(*configure_args)
_run(project_dir, debug_config, __unprocessed) _run(project_dir, debug_config, client_extra_args)
return None return None
def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed): def _configure(ctx, project_config, env_name, load_mode, verbose, client_extra_args):
platform = PlatformFactory.from_env(env_name, autoinstall=True) platform = PlatformFactory.from_env(env_name, autoinstall=True)
debug_config = DebugConfigFactory.new( debug_config = DebugConfigFactory.new(
platform, platform,
project_config, project_config,
env_name, env_name,
) )
if "--version" in __unprocessed: if "--version" in client_extra_args:
raise ReturnErrorCode( raise ReturnErrorCode(
subprocess.run( subprocess.run(
[debug_config.client_executable_path, "--version"], check=True [debug_config.client_executable_path, "--version"], check=True
@ -161,12 +164,12 @@ def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed)
return debug_config return debug_config
def _run(project_dir, debug_config, __unprocessed): def _run(project_dir, debug_config, client_extra_args):
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop() loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)
client = GDBClientProcess(project_dir, debug_config) client = GDBClientProcess(project_dir, debug_config)
coro = client.run(__unprocessed) coro = client.run(client_extra_args)
try: try:
signal.signal(signal.SIGINT, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN)
loop.run_until_complete(coro) loop.run_until_complete(coro)

View File

@ -24,7 +24,9 @@ from platformio.project.options import ProjectOptions
class DebugConfigBase: # pylint: disable=too-many-instance-attributes class DebugConfigBase: # pylint: disable=too-many-instance-attributes
def __init__(self, platform, project_config, env_name, port=None): DEFAULT_PORT = None
def __init__(self, platform, project_config, env_name):
self.platform = platform self.platform = platform
self.project_config = project_config self.project_config = project_config
self.env_name = env_name self.env_name = env_name
@ -48,7 +50,6 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
self._load_cmds = None self._load_cmds = None
self._port = None self._port = None
self.port = port
self.server = self._configure_server() self.server = self._configure_server()
try: try:
@ -120,8 +121,10 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
@property @property
def port(self): def port(self):
return ( return (
self.env_options.get("debug_port", self.tool_settings.get("port")) self._port
or self._port or self.env_options.get("debug_port")
or self.tool_settings.get("port")
or self.DEFAULT_PORT
) )
@port.setter @port.setter
@ -197,9 +200,11 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes
cwd=server_package_dir if server_package else None, cwd=server_package_dir if server_package else None,
executable=result.get("executable"), executable=result.get("executable"),
arguments=[ arguments=[
a.replace("$PACKAGE_DIR", server_package_dir) (
if server_package_dir a.replace("$PACKAGE_DIR", server_package_dir)
else a if server_package_dir
else a
)
for a in result.get("arguments", []) for a in result.get("arguments", [])
], ],
) )
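
The hunk above moves the debug port default out of the constructor keyword arguments and into a class-level DEFAULT_PORT, and reorders the lookup so an explicitly assigned port always wins. Below is a minimal standalone sketch of the resulting resolution order; the simplified constructor and the print calls are illustrative only (the real class is built from platform, project_config and env_name), while the DEFAULT_PORT values mirror the per-tool subclasses later in this diff.

class DebugConfigBase:
    DEFAULT_PORT = None  # per-tool subclasses override this, e.g. ":3333" or ":2331"

    def __init__(self, env_options=None, tool_settings=None):
        self.env_options = env_options or {}
        self.tool_settings = tool_settings or {}
        self._port = None

    @property
    def port(self):
        # Lookup order: explicit value, `debug_port` project option,
        # tool manifest setting, then the class default.
        return (
            self._port
            or self.env_options.get("debug_port")
            or self.tool_settings.get("port")
            or self.DEFAULT_PORT
        )


class GenericDebugConfig(DebugConfigBase):
    DEFAULT_PORT = ":3333"


print(GenericDebugConfig().port)                                      # ":3333"
print(GenericDebugConfig(env_options={"debug_port": ":3344"}).port)   # ":3344"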

View File

@ -27,17 +27,13 @@ class DebugConfigFactory:
@classmethod @classmethod
def new(cls, platform, project_config, env_name): def new(cls, platform, project_config, env_name):
board_config = platform.board_config( board_id = project_config.get("env:" + env_name, "board")
project_config.get("env:" + env_name, "board")
)
tool_name = (
board_config.get_debug_tool_name(
project_config.get("env:" + env_name, "debug_tool")
)
if board_config
else None
)
config_cls = None config_cls = None
tool_name = None
if board_id:
tool_name = platform.board_config(
project_config.get("env:" + env_name, "board")
).get_debug_tool_name(project_config.get("env:" + env_name, "debug_tool"))
try: try:
mod = importlib.import_module("platformio.debug.config.%s" % tool_name) mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
config_cls = getattr(mod, cls.get_clsname(tool_name)) config_cls = getattr(mod, cls.get_clsname(tool_name))

View File

@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class GenericDebugConfig(DebugConfigBase): class GenericDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":3333"
GDB_INIT_SCRIPT = """ GDB_INIT_SCRIPT = """
define pio_reset_halt_target define pio_reset_halt_target
monitor reset halt monitor reset halt
@ -31,8 +32,3 @@ $LOAD_CMDS
pio_reset_halt_target pio_reset_halt_target
$INIT_BREAK $INIT_BREAK
""" """
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":3333"
super().__init__(*args, **kwargs)

View File

@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class JlinkDebugConfig(DebugConfigBase): class JlinkDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":2331"
GDB_INIT_SCRIPT = """ GDB_INIT_SCRIPT = """
define pio_reset_halt_target define pio_reset_halt_target
monitor reset monitor reset
@ -36,11 +37,6 @@ $LOAD_CMDS
$INIT_BREAK $INIT_BREAK
""" """
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":2331"
super().__init__(*args, **kwargs)
@property @property
def server_ready_pattern(self): def server_ready_pattern(self):
return super().server_ready_pattern or ("Waiting for GDB connection") return super().server_ready_pattern or ("Waiting for GDB connection")

View File

@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class MspdebugDebugConfig(DebugConfigBase): class MspdebugDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":2000"
GDB_INIT_SCRIPT = """ GDB_INIT_SCRIPT = """
define pio_reset_halt_target define pio_reset_halt_target
end end
@ -29,8 +30,3 @@ $LOAD_CMDS
pio_reset_halt_target pio_reset_halt_target
$INIT_BREAK $INIT_BREAK
""" """
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":2000"
super().__init__(*args, **kwargs)

View File

@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class QemuDebugConfig(DebugConfigBase): class QemuDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":1234"
GDB_INIT_SCRIPT = """ GDB_INIT_SCRIPT = """
define pio_reset_halt_target define pio_reset_halt_target
monitor system_reset monitor system_reset
@ -30,8 +31,3 @@ $LOAD_CMDS
pio_reset_halt_target pio_reset_halt_target
$INIT_BREAK $INIT_BREAK
""" """
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":1234"
super().__init__(*args, **kwargs)

View File

@ -16,6 +16,7 @@ from platformio.debug.config.base import DebugConfigBase
class RenodeDebugConfig(DebugConfigBase): class RenodeDebugConfig(DebugConfigBase):
DEFAULT_PORT = ":3333"
GDB_INIT_SCRIPT = """ GDB_INIT_SCRIPT = """
define pio_reset_halt_target define pio_reset_halt_target
monitor machine Reset monitor machine Reset
@ -33,11 +34,6 @@ $INIT_BREAK
monitor start monitor start
""" """
def __init__(self, *args, **kwargs):
if "port" not in kwargs:
kwargs["port"] = ":3333"
super().__init__(*args, **kwargs)
@property @property
def server_ready_pattern(self): def server_ready_pattern(self):
return super().server_ready_pattern or ( return super().server_ready_pattern or (

View File

@ -62,7 +62,9 @@ class DebugServerProcess(DebugBaseProcess):
openocd_pipe_allowed = all( openocd_pipe_allowed = all(
[ [
not self.debug_config.env_options.get("debug_port"), not self.debug_config.env_options.get(
"debug_port", self.debug_config.tool_settings.get("port")
),
"gdb" in self.debug_config.client_executable_path, "gdb" in self.debug_config.client_executable_path,
"openocd" in server_executable, "openocd" in server_executable,
] ]

View File

@ -144,9 +144,9 @@ def list_mdns_services():
if service.properties: if service.properties:
try: try:
properties = { properties = {
k.decode("utf8"): v.decode("utf8") k.decode("utf8"): (
if isinstance(v, bytes) v.decode("utf8") if isinstance(v, bytes) else v
else v )
for k, v in service.properties.items() for k, v in service.properties.items()
} }
json.dumps(properties) json.dumps(properties)

View File

@ -125,9 +125,11 @@ def device_monitor_cmd(**options):
options = apply_project_monitor_options(options, project_options) options = apply_project_monitor_options(options, project_options)
register_filters(platform=platform, options=options) register_filters(platform=platform, options=options)
options["port"] = SerialPortFinder( options["port"] = SerialPortFinder(
board_config=platform.board_config(project_options.get("board")) board_config=(
if platform and project_options.get("board") platform.board_config(project_options.get("board"))
else None, if platform and project_options.get("board")
else None
),
upload_protocol=project_options.get("upload_protocol"), upload_protocol=project_options.get("upload_protocol"),
ensure_ready=True, ensure_ready=True,
).find(initial_port=options["port"]) ).find(initial_port=options["port"])

View File

@ -211,7 +211,7 @@ def change_filemtime(path, mtime):
def rmtree(path): def rmtree(path):
def _onerror(func, path, __): def _onexc(func, path, _):
try: try:
st_mode = os.stat(path).st_mode st_mode = os.stat(path).st_mode
if st_mode & stat.S_IREAD: if st_mode & stat.S_IREAD:
@ -224,4 +224,7 @@ def rmtree(path):
err=True, err=True,
) )
return shutil.rmtree(path, onerror=_onerror) # pylint: disable=unexpected-keyword-arg, deprecated-argument
if sys.version_info < (3, 12):
return shutil.rmtree(path, onerror=_onexc)
return shutil.rmtree(path, onexc=_onexc)
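
For context, Python 3.12 deprecated the onerror parameter of shutil.rmtree in favor of onexc, which receives the exception object itself rather than a sys.exc_info() tuple; the hunk above version-gates the call accordingly. A compressed, self-contained sketch of the same compatibility shim (the helper names here are illustrative):

import os
import shutil
import stat
import sys

def _force_remove(func, path, _):
    # Works as both an `onerror` and an `onexc` handler because the third
    # argument (exc_info tuple vs. exception instance) is ignored.
    os.chmod(path, stat.S_IWRITE)
    func(path)

def rmtree_compat(path):
    if sys.version_info < (3, 12):
        return shutil.rmtree(path, onerror=_force_remove)
    return shutil.rmtree(path, onexc=_force_remove)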

View File

@ -63,9 +63,9 @@ class MemUsageRPC(BaseRPCHandler):
device=current_report["device"], device=current_report["device"],
trend=dict( trend=dict(
current=current_report["memory"]["total"], current=current_report["memory"]["total"],
previous=previous_report["memory"]["total"] previous=(
if previous_report previous_report["memory"]["total"] if previous_report else None
else None, ),
), ),
top=dict( top=dict(
files=self._calculate_top_files(current_report["memory"]["files"])[ files=self._calculate_top_files(current_report["memory"]["files"])[

View File

@ -140,15 +140,19 @@ class ProjectRPC(BaseRPCHandler):
return dict( return dict(
platform=dict( platform=dict(
ownername=platform_pkg.metadata.spec.owner ownername=(
if platform_pkg.metadata.spec platform_pkg.metadata.spec.owner
else None, if platform_pkg.metadata.spec
else None
),
name=platform.name, name=platform.name,
title=platform.title, title=platform.title,
version=str(platform_pkg.metadata.version), version=str(platform_pkg.metadata.version),
), ),
board=platform.board_config(board_id).get_brief_data() board=(
if board_id platform.board_config(board_id).get_brief_data()
else None, if board_id
else None
),
frameworks=frameworks or None, frameworks=frameworks or None,
) )

View File

@ -15,7 +15,6 @@
import contextlib import contextlib
import itertools import itertools
import json import json
import os
import socket import socket
import time import time
@ -24,6 +23,7 @@ import httpx
from platformio import __check_internet_hosts__, app, util from platformio import __check_internet_hosts__, app, util
from platformio.cache import ContentCache, cleanup_content_cache from platformio.cache import ContentCache, cleanup_content_cache
from platformio.exception import PlatformioException, UserSideException from platformio.exception import PlatformioException, UserSideException
from platformio.pipdeps import is_proxy_set
RETRIES_BACKOFF_FACTOR = 2 # 0s, 2s, 4s, 8s, etc. RETRIES_BACKOFF_FACTOR = 2 # 0s, 2s, 4s, 8s, etc.
RETRIES_METHOD_WHITELIST = ["GET"] RETRIES_METHOD_WHITELIST = ["GET"]
@ -245,9 +245,7 @@ def _internet_on():
socket.setdefaulttimeout(timeout) socket.setdefaulttimeout(timeout)
for host in __check_internet_hosts__: for host in __check_internet_hosts__:
try: try:
for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"): if is_proxy_set():
if not os.getenv(var) and not os.getenv(var.lower()):
continue
httpx.get("http://%s" % host, follow_redirects=False, timeout=timeout) httpx.get("http://%s" % host, follow_redirects=False, timeout=timeout)
return True return True
# try to resolve `host` for both AF_INET and AF_INET6, and then try to connect # try to resolve `host` for both AF_INET and AF_INET6, and then try to connect

View File

@ -20,6 +20,7 @@ import click
from platformio import fs from platformio import fs
from platformio.package.exception import UnknownPackageError from platformio.package.exception import UnknownPackageError
from platformio.package.manager.core import get_core_package_dir
from platformio.package.manager.library import LibraryPackageManager from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager from platformio.package.manager.tool import ToolPackageManager
@ -120,7 +121,7 @@ def install_project_env_dependencies(project_env, options=None):
# custom tools # custom tools
if options.get("tools"): if options.get("tools"):
installed_conds.append(_install_project_env_custom_tools(project_env, options)) installed_conds.append(_install_project_env_custom_tools(project_env, options))
# custom ibraries # custom libraries
if options.get("libraries"): if options.get("libraries"):
installed_conds.append( installed_conds.append(
_install_project_env_custom_libraries(project_env, options) _install_project_env_custom_libraries(project_env, options)
@ -152,6 +153,8 @@ def _install_project_env_platform(project_env, options):
skip_dependencies=options.get("skip_dependencies"), skip_dependencies=options.get("skip_dependencies"),
force=options.get("force"), force=options.get("force"),
) )
# ensure SCons is installed
get_core_package_dir("tool-scons")
return not already_up_to_date return not already_up_to_date
@ -219,9 +222,11 @@ def _install_project_env_libraries(project_env, options):
env_lm = LibraryPackageManager( env_lm = LibraryPackageManager(
os.path.join(config.get("platformio", "libdeps_dir"), project_env), os.path.join(config.get("platformio", "libdeps_dir"), project_env),
compatibility=PackageCompatibility(**compatibility_qualifiers) compatibility=(
if compatibility_qualifiers PackageCompatibility(**compatibility_qualifiers)
else None, if compatibility_qualifiers
else None
),
) )
private_lm = LibraryPackageManager( private_lm = LibraryPackageManager(
os.path.join(config.get("platformio", "lib_dir")) os.path.join(config.get("platformio", "lib_dir"))

View File

@ -86,6 +86,7 @@ def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals
package, owner, typex, released_at, private, notify, no_interactive, non_interactive package, owner, typex, released_at, private, notify, no_interactive, non_interactive
): ):
click.secho("Preparing a package...", fg="cyan") click.secho("Preparing a package...", fg="cyan")
package = os.path.abspath(package)
no_interactive = no_interactive or non_interactive no_interactive = no_interactive or non_interactive
with AccountClient() as client: with AccountClient() as client:
owner = owner or client.get_logged_username() owner = owner or client.get_logged_username()

View File

@ -65,10 +65,12 @@ def print_search_item(item):
click.echo( click.echo(
"%s%s • Published on %s" "%s%s • Published on %s"
% ( % (
item["type"].capitalize() (
if item["tier"] == "community" item["type"].capitalize()
else click.style( if item["tier"] == "community"
("%s %s" % (item["tier"], item["type"])).title(), bold=True else click.style(
("%s %s" % (item["tier"], item["type"])).title(), bold=True
)
), ),
item["version"]["name"], item["version"]["name"],
util.parse_datetime(item["version"]["released_at"]).strftime("%c"), util.parse_datetime(item["version"]["released_at"]).strftime("%c"),

View File

@ -98,9 +98,9 @@ class PackageManagerInstallMixin:
else: else:
pkg = self.install_from_registry( pkg = self.install_from_registry(
spec, spec,
search_qualifiers=compatibility.to_search_qualifiers() search_qualifiers=(
if compatibility compatibility.to_search_qualifiers() if compatibility else None
else None, ),
) )
if not pkg or not pkg.metadata: if not pkg or not pkg.metadata:

View File

@ -294,9 +294,11 @@ class BaseManifestParser:
if not matched_files: if not matched_files:
continue continue
result[root] = dict( result[root] = dict(
name="Examples" name=(
if root == examples_dir "Examples"
else os.path.relpath(root, examples_dir), if root == examples_dir
else os.path.relpath(root, examples_dir)
),
base=os.path.relpath(root, package_dir), base=os.path.relpath(root, package_dir),
files=matched_files, files=matched_files,
) )
@ -540,6 +542,8 @@ class LibraryPropertiesManifestParser(BaseManifestParser):
"esp32": "espressif32", "esp32": "espressif32",
"arc32": "intel_arc32", "arc32": "intel_arc32",
"stm32": "ststm32", "stm32": "ststm32",
"nrf52": "nordicnrf52",
"rp2040": "raspberrypi",
} }
for arch in properties.get("architectures", "").split(","): for arch in properties.get("architectures", "").split(","):
if "particle-" in arch: if "particle-" in arch:

View File

@ -276,7 +276,7 @@ class ManifestSchema(BaseSchema):
@staticmethod @staticmethod
@memoized(expire="1h") @memoized(expire="1h")
def load_spdx_licenses(): def load_spdx_licenses():
version = "3.21" version = "3.22"
spdx_data_url = ( spdx_data_url = (
"https://raw.githubusercontent.com/spdx/license-list-data/" "https://raw.githubusercontent.com/spdx/license-list-data/"
f"v{version}/json/licenses.json" f"v{version}/json/licenses.json"

View File

@ -485,9 +485,11 @@ class PackageItem:
def __eq__(self, other): def __eq__(self, other):
conds = [ conds = [
os.path.realpath(self.path) == os.path.realpath(other.path) (
if self.path and other.path os.path.realpath(self.path) == os.path.realpath(other.path)
else self.path == other.path, if self.path and other.path
else self.path == other.path
),
self.metadata == other.metadata, self.metadata == other.metadata,
] ]
return all(conds) return all(conds)

platformio/pipdeps.py (new file, 71 lines)
View File

@ -0,0 +1,71 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
import sys
PY36 = sys.version_info[0:2] == (3, 6)
def get_pip_dependencies():
core = [
"bottle == 0.12.*",
"click >=8.0.4, <9",
"colorama",
"httpx%s >=0.22.0, <0.27" % ("[socks]" if is_proxy_set(socks=True) else ""),
"marshmallow == 3.*",
"pyelftools >=0.27, <1",
"pyserial == 3.5.*", # keep in sync "device/monitor/terminal.py"
"semantic_version == 2.10.*",
"tabulate == 0.*",
]
home = [
# PIO Home requirements
"ajsonrpc == 1.2.*",
"starlette >=0.19, <0.36",
"uvicorn %s" % ("== 0.16.0" if PY36 else ">=0.16, <0.28"),
"wsproto == 1.*",
]
extra = []
# issue #4702; Broken "requests/charset_normalizer" on macOS ARM
if platform.system() == "Darwin" and "arm" in platform.machine().lower():
extra.append("chardet>=3.0.2,<6")
# issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
try:
import ssl # pylint: disable=import-outside-toplevel
if ssl.OPENSSL_VERSION.startswith("OpenSSL ") and ssl.OPENSSL_VERSION_INFO < (
1,
1,
1,
):
extra.append("urllib3<2")
except ImportError:
pass
return core + home + extra
def is_proxy_set(socks=False):
for var in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY"):
value = os.getenv(var, os.getenv(var.lower()))
if not value or (socks and not value.startswith("socks5://")):
continue
return True
return False
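
With __install_requires__ removed from platformio/__init__.py, the dependency list is now computed at runtime by this new module, and `pio upgrade --only-dependencies` feeds it straight to pip (see the upgrade.py hunk earlier in this diff). A small usage sketch, assuming PlatformIO is importable; the printout is illustrative:

import sys

from platformio.pipdeps import get_pip_dependencies, is_proxy_set

# Roughly the pip invocation built by `pio upgrade --only-dependencies`.
cmd = [sys.executable, "-m", "pip", "install", "--upgrade", "pip", *get_pip_dependencies()]
print(" ".join(cmd))

# The "[socks]" extra for httpx is requested only when a SOCKS proxy is set
# in the environment (is_proxy_set(socks=True) checks for "socks5://" URLs).
print("SOCKS proxy configured:", is_proxy_set(socks=True))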

View File

@ -116,9 +116,9 @@ class PlatformRunMixin:
args, args,
stdout=proc.BuildAsyncPipe( stdout=proc.BuildAsyncPipe(
line_callback=self._on_stdout_line, line_callback=self._on_stdout_line,
data_callback=lambda data: None data_callback=lambda data: (
if self.silent None if self.silent else _write_and_flush(sys.stdout, data)
else _write_and_flush(sys.stdout, data), ),
), ),
stderr=proc.BuildAsyncPipe( stderr=proc.BuildAsyncPipe(
line_callback=self._on_stderr_line, line_callback=self._on_stderr_line,

View File

@ -169,6 +169,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE
def board_config(self, id_): def board_config(self, id_):
assert id_
return self.get_boards(id_) return self.get_boards(id_)
def get_package_type(self, name): def get_package_type(self, name):

View File

@ -69,7 +69,7 @@ class BuildAsyncPipe(AsyncPipeBase):
print_immediately = False print_immediately = False
for char in iter(lambda: self._pipe_reader.read(1), ""): for char in iter(lambda: self._pipe_reader.read(1), ""):
self._buffer += char # self._buffer += char
if line and char.strip() and line[-3:] == (char * 3): if line and char.strip() and line[-3:] == (char * 3):
print_immediately = True print_immediately = True

View File

@ -82,9 +82,11 @@ def lint_configuration(json_output=False):
( (
click.style(error["type"], fg="red"), click.style(error["type"], fg="red"),
error["message"], error["message"],
error.get("source", "") + (f":{error.get('lineno')}") (
if "lineno" in error error.get("source", "") + (f":{error.get('lineno')}")
else "", if "lineno" in error
else ""
),
) )
for error in errors for error in errors
], ],

View File

@ -14,14 +14,16 @@
import configparser import configparser
import glob import glob
import hashlib
import json import json
import os import os
import re import re
import time
import click import click
from platformio import fs from platformio import fs
from platformio.compat import MISSING, string_types from platformio.compat import MISSING, hashlib_encode_data, string_types
from platformio.project import exception from platformio.project import exception
from platformio.project.options import ProjectOptions from platformio.project.options import ProjectOptions
@ -41,7 +43,17 @@ CONFIG_HEADER = """
class ProjectConfigBase: class ProjectConfigBase:
ENVNAME_RE = re.compile(r"^[a-z\d\_\-]+$", flags=re.I) ENVNAME_RE = re.compile(r"^[a-z\d\_\-]+$", flags=re.I)
INLINE_COMMENT_RE = re.compile(r"\s+;.*$") INLINE_COMMENT_RE = re.compile(r"\s+;.*$")
VARTPL_RE = re.compile(r"\$\{([^\.\}\()]+)\.([^\}]+)\}") VARTPL_RE = re.compile(r"\$\{(?:([^\.\}\()]+)\.)?([^\}]+)\}")
BUILTIN_VARS = {
"PROJECT_DIR": lambda: os.getcwd(), # pylint: disable=unnecessary-lambda
"PROJECT_HASH": lambda: "%s-%s"
% (
os.path.basename(os.getcwd()),
hashlib.sha1(hashlib_encode_data(os.getcwd())).hexdigest()[:10],
),
"UNIX_TIME": lambda: str(int(time.time())),
}
CUSTOM_OPTION_PREFIXES = ("custom_", "board_") CUSTOM_OPTION_PREFIXES = ("custom_", "board_")
@ -152,6 +164,7 @@ class ProjectConfigBase:
@staticmethod @staticmethod
def get_section_scope(section): def get_section_scope(section):
assert section
return section.split(":", 1)[0] if ":" in section else section return section.split(":", 1)[0] if ":" in section else section
def walk_options(self, root_section): def walk_options(self, root_section):
@ -274,7 +287,7 @@ class ProjectConfigBase:
value = ( value = (
default if default != MISSING else self._parser.get(section, option) default if default != MISSING else self._parser.get(section, option)
) )
return self._expand_interpolations(section, value) return self._expand_interpolations(section, option, value)
if option_meta.sysenvvar: if option_meta.sysenvvar:
envvar_value = os.getenv(option_meta.sysenvvar) envvar_value = os.getenv(option_meta.sysenvvar)
@ -297,24 +310,50 @@ class ProjectConfigBase:
if value == MISSING: if value == MISSING:
return None return None
return self._expand_interpolations(section, value) return self._expand_interpolations(section, option, value)
def _expand_interpolations(self, parent_section, value): def _expand_interpolations(self, section, option, value):
if ( if not value or not isinstance(value, string_types) or not "$" in value:
not value return value
or not isinstance(value, string_types)
or not all(["${" in value, "}" in value]) # legacy support for variables delclared without "${}"
): legacy_vars = ["PROJECT_HASH"]
stop = False
while not stop:
stop = True
for name in legacy_vars:
x = value.find(f"${name}")
if x < 0 or value[x - 1] == "$":
continue
value = "%s${%s}%s" % (value[:x], name, value[x + len(name) + 1 :])
stop = False
warn_msg = (
"Invalid variable declaration. Please use "
f"`${{{name}}}` instead of `${name}`"
)
if warn_msg not in self.warnings:
self.warnings.append(warn_msg)
if not all(["${" in value, "}" in value]):
return value return value
return self.VARTPL_RE.sub( return self.VARTPL_RE.sub(
lambda match: self._re_interpolation_handler(parent_section, match), value lambda match: self._re_interpolation_handler(section, option, match), value
) )
def _re_interpolation_handler(self, parent_section, match): def _re_interpolation_handler(self, parent_section, parent_option, match):
section, option = match.group(1), match.group(2) section, option = match.group(1), match.group(2)
# handle built-in variables
if section is None:
if option in self.BUILTIN_VARS:
return self.BUILTIN_VARS[option]()
# SCons varaibles
return f"${{{option}}}"
# handle system environment variables # handle system environment variables
if section == "sysenv": if section == "sysenv":
return os.getenv(option) return os.getenv(option)
# handle ${this.*} # handle ${this.*}
if section == "this": if section == "this":
section = parent_section section = parent_section
@ -322,21 +361,18 @@ class ProjectConfigBase:
if not parent_section.startswith("env:"): if not parent_section.startswith("env:"):
raise exception.ProjectOptionValueError( raise exception.ProjectOptionValueError(
f"`${{this.__env__}}` is called from the `{parent_section}` " f"`${{this.__env__}}` is called from the `{parent_section}` "
"section that is not valid PlatformIO environment, see", "section that is not valid PlatformIO environment. Please "
option, f"check `{parent_option}` option in the `{section}` section"
" ",
section,
) )
return parent_section[4:] return parent_section[4:]
# handle nested calls # handle nested calls
try: try:
value = self.get(section, option) value = self.get(section, option)
except RecursionError as exc: except RecursionError as exc:
raise exception.ProjectOptionValueError( raise exception.ProjectOptionValueError(
"Infinite recursion has been detected", f"Infinite recursion has been detected for `{option}` "
option, f"option in the `{section}` section"
" ",
section,
) from exc ) from exc
if isinstance(value, list): if isinstance(value, list):
return "\n".join(value) return "\n".join(value)
@ -363,10 +399,8 @@ class ProjectConfigBase:
if not self.expand_interpolations: if not self.expand_interpolations:
return value return value
raise exception.ProjectOptionValueError( raise exception.ProjectOptionValueError(
exc.format_message(), "%s for `%s` option in the `%s` section (%s)"
option, % (exc.format_message(), option, section, option_meta.description)
" (%s) " % option_meta.description,
section,
) )
@staticmethod @staticmethod
@ -439,8 +473,9 @@ class ProjectConfigLintMixin:
try: try:
config = cls.get_instance(path) config = cls.get_instance(path)
config.validate(silent=True) config.validate(silent=True)
warnings = config.warnings warnings = config.warnings # in case "as_tuple" fails
config.as_tuple() config.as_tuple()
warnings = config.warnings
except Exception as exc: # pylint: disable=broad-exception-caught except Exception as exc: # pylint: disable=broad-exception-caught
if exc.__cause__ is not None: if exc.__cause__ is not None:
exc = exc.__cause__ exc = exc.__cause__
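
The project configuration hunk above widens VARTPL_RE so the section prefix becomes optional, which is what enables bare built-in variables such as ${PROJECT_DIR}, ${PROJECT_HASH} and ${UNIX_TIME} while leaving SCons-style ${VARNAME} references untouched. A minimal runnable condensation of that logic follows; expand() and the trimmed BUILTIN_VARS table are illustrative, and nested ${section.option} lookups are stubbed out:

import os
import re
import time

VARTPL_RE = re.compile(r"\$\{(?:([^\.\}\()]+)\.)?([^\}]+)\}")

BUILTIN_VARS = {
    "PROJECT_DIR": lambda: os.getcwd(),
    "UNIX_TIME": lambda: str(int(time.time())),
}

def expand(value):
    def handler(match):
        section, option = match.group(1), match.group(2)
        if section is None:
            if option in BUILTIN_VARS:
                return BUILTIN_VARS[option]()
            return "${%s}" % option  # assume an SCons variable, pass through
        return "<%s.%s>" % (section, option)  # real code resolves the config option here
    return VARTPL_RE.sub(handler, value)

print(expand("${PROJECT_DIR}/.pio"))          # built-in variable
print(expand("${platformio.workspace_dir}"))  # section.option reference
print(expand("${BUILD_DIR}/firmware.bin"))    # SCons variable is left as-is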

View File

@ -51,4 +51,4 @@ class InvalidEnvNameError(ProjectError, UserSideException):
class ProjectOptionValueError(ProjectError, UserSideException): class ProjectOptionValueError(ProjectError, UserSideException):
MESSAGE = "{0} for option `{1}`{2}in section [{3}]" pass

View File

@ -91,9 +91,11 @@ class ProjectGenerator:
"default_debug_env_name": get_default_debug_env(self.config), "default_debug_env_name": get_default_debug_env(self.config),
"env_name": self.env_name, "env_name": self.env_name,
"user_home_dir": os.path.abspath(fs.expanduser("~")), "user_home_dir": os.path.abspath(fs.expanduser("~")),
"platformio_path": sys.argv[0] "platformio_path": (
if os.path.isfile(sys.argv[0]) sys.argv[0]
else where_is_program("platformio"), if os.path.isfile(sys.argv[0])
else where_is_program("platformio")
),
"env_path": os.getenv("PATH"), "env_path": os.getenv("PATH"),
"env_pathsep": os.pathsep, "env_pathsep": os.pathsep,
} }

View File

@ -1,4 +1,3 @@
% import codecs
% import json % import json
% import os % import os
% %
@ -47,9 +46,14 @@
% return data % return data
% end % end
% %
% def _contains_external_configurations(launch_config): % def _contains_custom_configurations(launch_config):
% pio_config_names = [
% c["name"]
% for c in get_pio_configurations()
% ]
% return any( % return any(
% c.get("type", "") != "platformio-debug" % c.get("type", "") != "platformio-debug"
% or c.get("name", "") in pio_config_names
% for c in launch_config.get("configurations", []) % for c in launch_config.get("configurations", [])
% ) % )
% end % end
@ -59,10 +63,14 @@
% return launch_config % return launch_config
% end % end
% %
% pio_config_names = [
% c["name"]
% for c in get_pio_configurations()
% ]
% external_configurations = [ % external_configurations = [
% config % c
% for config in launch_config["configurations"] % for c in launch_config["configurations"]
% if config.get("type", "") != "platformio-debug" % if c.get("type", "") != "platformio-debug" or c.get("name", "") not in pio_config_names
% ] % ]
% %
% launch_config["configurations"] = external_configurations % launch_config["configurations"] = external_configurations
@ -73,11 +81,11 @@
% launch_config = {"version": "0.2.0", "configurations": []} % launch_config = {"version": "0.2.0", "configurations": []}
% launch_file = os.path.join(project_dir, ".vscode", "launch.json") % launch_file = os.path.join(project_dir, ".vscode", "launch.json")
% if os.path.isfile(launch_file): % if os.path.isfile(launch_file):
% with codecs.open(launch_file, "r", encoding="utf8") as fp: % with open(launch_file, "r", encoding="utf8") as fp:
% launch_data = _remove_comments(fp.readlines()) % launch_data = _remove_comments(fp.readlines())
% try: % try:
% prev_config = json.loads(launch_data) % prev_config = json.loads(launch_data)
% if _contains_external_configurations(prev_config): % if _contains_custom_configurations(prev_config):
% launch_config = _remove_pio_configurations(prev_config) % launch_config = _remove_pio_configurations(prev_config)
% end % end
% except: % except:
@ -91,9 +99,9 @@
% %
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY // AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY
// //
// PIO Unified Debugger // PlatformIO Debugging Solution
// //
// Documentation: https://docs.platformio.org/page/plus/debugging.html // Documentation: https://docs.platformio.org/en/latest/plus/debugging.html
// Configuration: https://docs.platformio.org/page/projectconf/section_env_debug.html // Configuration: https://docs.platformio.org/en/latest/projectconf/sections/env/options/debug/index.html
{{ json.dumps(get_launch_configuration(), indent=4, ensure_ascii=False) }} {{ json.dumps(get_launch_configuration(), indent=4, ensure_ascii=False) }}

View File

@ -14,14 +14,13 @@
# pylint: disable=redefined-builtin, too-many-arguments # pylint: disable=redefined-builtin, too-many-arguments
import hashlib
import os import os
from collections import OrderedDict from collections import OrderedDict
import click import click
from platformio import fs from platformio import fs
from platformio.compat import IS_WINDOWS, hashlib_encode_data from platformio.compat import IS_WINDOWS
class ConfigOption: # pylint: disable=too-many-instance-attributes class ConfigOption: # pylint: disable=too-many-instance-attributes
@ -80,30 +79,6 @@ def ConfigEnvOption(*args, **kwargs):
return ConfigOption("env", *args, **kwargs) return ConfigOption("env", *args, **kwargs)
def calculate_path_hash(path):
return "%s-%s" % (
os.path.basename(path),
hashlib.sha1(hashlib_encode_data(path.lower())).hexdigest()[:10],
)
def expand_dir_templates(path):
project_dir = os.getcwd()
tpls = {
"$PROJECT_DIR": lambda: project_dir,
"$PROJECT_HASH": lambda: calculate_path_hash(project_dir),
}
done = False
while not done:
done = True
for tpl, cb in tpls.items():
if tpl not in path:
continue
path = path.replace(tpl, cb())
done = False
return path
def validate_dir(path): def validate_dir(path):
if not path: if not path:
return path return path
@ -112,8 +87,6 @@ def validate_dir(path):
return path return path
if path.startswith("~"): if path.startswith("~"):
path = fs.expanduser(path) path = fs.expanduser(path)
if "$" in path:
path = expand_dir_templates(path)
return os.path.abspath(path) return os.path.abspath(path)
@ -137,6 +110,7 @@ ProjectOptions = OrderedDict(
group="generic", group="generic",
name="name", name="name",
description="A project name", description="A project name",
default=lambda: os.path.basename(os.getcwd()),
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
group="generic", group="generic",
@ -240,7 +214,7 @@ ProjectOptions = OrderedDict(
"external library dependencies" "external library dependencies"
), ),
sysenvvar="PLATFORMIO_WORKSPACE_DIR", sysenvvar="PLATFORMIO_WORKSPACE_DIR",
default=os.path.join("$PROJECT_DIR", ".pio"), default=os.path.join("${PROJECT_DIR}", ".pio"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -266,17 +240,6 @@ ProjectOptions = OrderedDict(
default=os.path.join("${platformio.workspace_dir}", "libdeps"), default=os.path.join("${platformio.workspace_dir}", "libdeps"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption(
group="directory",
name="memusage_dir",
description=(
"A location where PlatformIO Core will store "
"project memory usage reports"
),
sysenvvar="PLATFORMIO_MEMUSAGE_DIR",
default=os.path.join("${platformio.workspace_dir}", "memusage"),
validate=validate_dir,
),
ConfigPlatformioOption( ConfigPlatformioOption(
group="directory", group="directory",
name="include_dir", name="include_dir",
@ -285,7 +248,7 @@ ProjectOptions = OrderedDict(
"System automatically adds this path to CPPPATH scope" "System automatically adds this path to CPPPATH scope"
), ),
sysenvvar="PLATFORMIO_INCLUDE_DIR", sysenvvar="PLATFORMIO_INCLUDE_DIR",
default=os.path.join("$PROJECT_DIR", "include"), default=os.path.join("${PROJECT_DIR}", "include"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -296,7 +259,7 @@ ProjectOptions = OrderedDict(
"project C/C++ source files" "project C/C++ source files"
), ),
sysenvvar="PLATFORMIO_SRC_DIR", sysenvvar="PLATFORMIO_SRC_DIR",
default=os.path.join("$PROJECT_DIR", "src"), default=os.path.join("${PROJECT_DIR}", "src"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -304,7 +267,7 @@ ProjectOptions = OrderedDict(
name="lib_dir", name="lib_dir",
description="A storage for the custom/private project libraries", description="A storage for the custom/private project libraries",
sysenvvar="PLATFORMIO_LIB_DIR", sysenvvar="PLATFORMIO_LIB_DIR",
default=os.path.join("$PROJECT_DIR", "lib"), default=os.path.join("${PROJECT_DIR}", "lib"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -315,7 +278,7 @@ ProjectOptions = OrderedDict(
"file system (SPIFFS, etc.)" "file system (SPIFFS, etc.)"
), ),
sysenvvar="PLATFORMIO_DATA_DIR", sysenvvar="PLATFORMIO_DATA_DIR",
default=os.path.join("$PROJECT_DIR", "data"), default=os.path.join("${PROJECT_DIR}", "data"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -326,7 +289,7 @@ ProjectOptions = OrderedDict(
"test source files" "test source files"
), ),
sysenvvar="PLATFORMIO_TEST_DIR", sysenvvar="PLATFORMIO_TEST_DIR",
default=os.path.join("$PROJECT_DIR", "test"), default=os.path.join("${PROJECT_DIR}", "test"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -334,7 +297,7 @@ ProjectOptions = OrderedDict(
name="boards_dir", name="boards_dir",
description="A storage for custom board manifests", description="A storage for custom board manifests",
sysenvvar="PLATFORMIO_BOARDS_DIR", sysenvvar="PLATFORMIO_BOARDS_DIR",
default=os.path.join("$PROJECT_DIR", "boards"), default=os.path.join("${PROJECT_DIR}", "boards"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -342,7 +305,7 @@ ProjectOptions = OrderedDict(
name="monitor_dir", name="monitor_dir",
description="A storage for custom monitor filters", description="A storage for custom monitor filters",
sysenvvar="PLATFORMIO_MONITOR_DIR", sysenvvar="PLATFORMIO_MONITOR_DIR",
default=os.path.join("$PROJECT_DIR", "monitor"), default=os.path.join("${PROJECT_DIR}", "monitor"),
validate=validate_dir, validate=validate_dir,
), ),
ConfigPlatformioOption( ConfigPlatformioOption(
@ -353,7 +316,7 @@ ProjectOptions = OrderedDict(
"synchronize extra files between remote machines" "synchronize extra files between remote machines"
), ),
sysenvvar="PLATFORMIO_SHARED_DIR", sysenvvar="PLATFORMIO_SHARED_DIR",
default=os.path.join("$PROJECT_DIR", "shared"), default=os.path.join("${PROJECT_DIR}", "shared"),
validate=validate_dir, validate=validate_dir,
), ),
# #
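Note on the ProjectOptions changes above: the "name" option gains a default derived from the project folder, the directory defaults switch from "$PROJECT_DIR" to the braced "${PROJECT_DIR}" form, and the memusage_dir option plus the "$"-template expansion in validate_dir are dropped. A minimal sketch of the new "name" default (the path below is hypothetical, used only for illustration):

    # Sketch only: mirrors default=lambda: os.path.basename(os.getcwd()),
    # which is evaluated while the current directory is the project root.
    import os

    def default_project_name(project_dir="/home/user/my-project-name"):  # hypothetical path
        return os.path.basename(project_dir)

    assert default_project_name() == "my-project-name"

The test_project_name test added later in this commit exercises exactly this behavior.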

View File

@ -41,9 +41,11 @@ def access_list_cmd(owner, urn_type, json_output): # pylint: disable=unused-arg
         table_data.append(
             (
                 "Access:",
-                click.style("Private", fg="red")
-                if resource.get("private", False)
-                else "Public",
+                (
+                    click.style("Private", fg="red")
+                    if resource.get("private", False)
+                    else "Public"
+                ),
             )
         )
         table_data.append(
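The change above, and the similar hunks below in the registry mirror, device monitor client, and JSON test report, are the same mechanical reformatting: a multi-line conditional expression used as an argument value is wrapped in its own parentheses, which looks like the conditional-expression parenthesization of newer Black releases. A before/after sketch with placeholder names (only the layout changes, not the behavior):

    private = True

    # before: the conditional hangs directly off the keyword argument
    report = dict(
        status="Private"
        if private
        else "Public",
    )

    # after: the conditional gets its own parentheses, its branches indent
    # one level deeper, and the trailing comma follows the closing paren
    report = dict(
        status=(
            "Private"
            if private
            else "Public"
        ),
    )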

View File

@ -53,9 +53,11 @@ class RegistryFileMirrorIterator:
"head", "head",
             self._url_parts.path,
             follow_redirects=False,
-            params=dict(bypass=",".join(self._visited_mirrors))
-            if self._visited_mirrors
-            else None,
+            params=(
+                dict(bypass=",".join(self._visited_mirrors))
+                if self._visited_mirrors
+                else None
+            ),
             x_with_authorization=registry.allowed_private_packages(),
         )
         stop_conditions = [
@ -91,7 +93,7 @@ class RegistryFileMirrorIterator:
             endpoint = f"https://dl.{host}"
             if endpoint not in endpoints:
                 endpoints.append(endpoint)
-        RegistryFileMirrorIterator.HTTP_CLIENT_INSTANCES[
-            self._mirror
-        ] = RegistryClient(endpoints)
+        RegistryFileMirrorIterator.HTTP_CLIENT_INSTANCES[self._mirror] = (
+            RegistryClient(endpoints)
+        )
         return RegistryFileMirrorIterator.HTTP_CLIENT_INSTANCES[self._mirror]

View File

@ -123,9 +123,11 @@ class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
                     index=i + 1,
                     host=device[0] + ":" if len(result) > 1 else "",
                     port=device[1]["port"],
-                    description=device[1]["description"]
-                    if device[1]["description"] != "n/a"
-                    else "",
+                    description=(
+                        device[1]["description"]
+                        if device[1]["description"] != "n/a"
+                        else ""
+                    ),
                 )
             )
         device_index = click.prompt(

View File

@ -62,11 +62,13 @@ class JsonTestReport(TestReportBase):
             test_dir=test_suite.test_dir,
             status=test_suite.status.name,
             duration=test_suite.duration,
-            timestamp=datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
-                "%Y-%m-%dT%H:%M:%S"
-            )
-            if test_suite.timestamp
-            else None,
+            timestamp=(
+                datetime.datetime.fromtimestamp(test_suite.timestamp).strftime(
+                    "%Y-%m-%dT%H:%M:%S"
+                )
+                if test_suite.timestamp
+                else None
+            ),
             testcase_nums=len(test_suite.cases),
             error_nums=test_suite.get_status_nums(TestStatus.ERRORED),
             failure_nums=test_suite.get_status_nums(TestStatus.FAILED),

View File

@ -143,6 +143,8 @@ def get_systype():
arch = "x86_" + platform.architecture()[0] arch = "x86_" + platform.architecture()[0]
if "x86" in arch: if "x86" in arch:
arch = "amd64" if "64" in arch else "x86" arch = "amd64" if "64" in arch else "x86"
if arch == "aarch64" and platform.architecture()[0] == "32bit":
arch = "armv7l"
return "%s_%s" % (system, arch) if arch else system return "%s_%s" % (system, arch) if arch else system
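The branch added to get_systype covers a 64-bit ARM kernel combined with a 32-bit Python build (a common Raspberry Pi OS setup), reporting the system type as armv7l instead of aarch64. A standalone sketch of the same check, assuming the architecture string comes from platform.machine() as in the surrounding code:

    import platform

    def detect_arch():
        arch = platform.machine().lower()
        if arch == "aarch64" and platform.architecture()[0] == "32bit":
            # 64-bit ARM kernel, but a 32-bit interpreter/userland: report armv7l
            arch = "armv7l"
        return arch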
@ -168,9 +170,8 @@ def items_in_list(needle, haystack):
 def parse_datetime(datestr):
-    if "T" in datestr and "Z" in datestr:
-        return datetime.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
-    return datetime.datetime.strptime(datestr)
+    assert "T" in datestr and "Z" in datestr
+    return datetime.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")


 def merge_dicts(d1, d2, path=None):
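On the parse_datetime change: the removed fallback, datetime.datetime.strptime(datestr), could never succeed because strptime requires a format argument, so the helper now simply asserts that it received an ISO-8601 "Zulu" timestamp. Usage sketch:

    import datetime

    # the only accepted shape after this change: "YYYY-MM-DDTHH:MM:SSZ"
    datetime.datetime.strptime("2024-01-27T14:11:01Z", "%Y-%m-%dT%H:%M:%SZ")
    # -> datetime.datetime(2024, 1, 27, 14, 11, 1)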

View File

@ -39,7 +39,7 @@ RST_COPYRIGHT = """.. Copyright (c) 2014-present PlatformIO <contact@platformio
limitations under the License. limitations under the License.
""" """
SKIP_DEBUG_TOOLS = ["esp-bridge", "esp-builtin"] SKIP_DEBUG_TOOLS = ["esp-bridge", "esp-builtin", "dfu"]
STATIC_FRAMEWORK_DATA = { STATIC_FRAMEWORK_DATA = {
"arduino": { "arduino": {

View File

@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import os
from setuptools import find_packages, setup from setuptools import find_packages, setup
from platformio import ( from platformio import (
@ -23,15 +22,8 @@ from platformio import (
__title__, __title__,
__url__, __url__,
__version__, __version__,
__install_requires__,
) )
from platformio.pipdeps import get_pip_dependencies
# handle extra dependency for SOCKS proxy
if any(
os.getenv(key, "").startswith("socks5://")
for key in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY")
):
__install_requires__.append("socksio")
setup( setup(
name=__title__, name=__title__,
@ -42,7 +34,7 @@ setup(
author_email=__email__, author_email=__email__,
url=__url__, url=__url__,
license=__license__, license=__license__,
install_requires=__install_requires__, install_requires=get_pip_dependencies(),
python_requires=">=3.6", python_requires=">=3.6",
packages=find_packages(include=["platformio", "platformio.*"]), packages=find_packages(include=["platformio", "platformio.*"]),
package_data={ package_data={
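setup.py no longer carries its own __install_requires__ list or the SOCKS-proxy special case; install_requires now comes from platformio.pipdeps.get_pip_dependencies(). That module is not shown in this diff, so the following is only a guess at its shape, with made-up pins, to indicate where the removed logic presumably went:

    # Hypothetical sketch of a get_pip_dependencies() helper; names and
    # versions are illustrative assumptions, not taken from this commit.
    import os

    def get_pip_dependencies():
        deps = [
            "click >= 8.0.4, < 9",  # example pin only
        ]
        # the SOCKS-proxy check removed from setup.py above, presumably
        # relocated into this helper
        if any(
            os.getenv(key, "").startswith("socks5://")
            for key in ("HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY")
        ):
            deps.append("socksio")
        return deps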

View File

@ -18,7 +18,7 @@ import os
import pytest import pytest
from platformio import fs from platformio import __core_packages__, fs
from platformio.package.commands.install import package_install_cmd from platformio.package.commands.install import package_install_cmd
from platformio.package.manager.library import LibraryPackageManager from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager from platformio.package.manager.platform import PlatformPackageManager
@ -148,7 +148,7 @@ def test_skip_dependencies(
), ),
PackageSpec("ESPAsyncWebServer-esphome@2.1.0"), PackageSpec("ESPAsyncWebServer-esphome@2.1.0"),
] ]
assert len(ToolPackageManager().get_installed()) == 0 assert len(ToolPackageManager().get_installed()) == 1 # SCons
def test_baremetal_project( def test_baremetal_project(
@ -177,6 +177,7 @@ def test_baremetal_project(
), ),
] ]
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
PackageSpec("toolchain-atmelavr@1.70300.191015"), PackageSpec("toolchain-atmelavr@1.70300.191015"),
] ]
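These expectation updates reflect that SCons is now installed as a regular core tool package, so it appears in ToolPackageManager().get_installed(). The expected spec is derived from the __core_packages__ requirement by stripping its leading operator; a sketch with an assumed version string:

    # Assumed requirement value for illustration; the real entry lives in
    # platformio/__init__.py and is not shown in this diff.
    __core_packages__ = {"tool-scons": "~4.40400.0"}

    spec = "tool-scons@%s" % __core_packages__["tool-scons"][1:]
    assert spec == "tool-scons@4.40400.0"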
@ -209,6 +210,7 @@ def test_project(
] ]
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
PackageSpec("framework-arduino-avr-attiny@1.5.2"), PackageSpec("framework-arduino-avr-attiny@1.5.2"),
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
PackageSpec("toolchain-atmelavr@1.70300.191015"), PackageSpec("toolchain-atmelavr@1.70300.191015"),
] ]
assert config.get("env:devkit", "lib_deps") == [ assert config.get("env:devkit", "lib_deps") == [
@ -443,7 +445,7 @@ def test_custom_project_libraries(
) )
assert pkgs_to_specs(lm.get_installed()) == [ assert pkgs_to_specs(lm.get_installed()) == [
PackageSpec("ArduinoJson@5.13.4"), PackageSpec("ArduinoJson@5.13.4"),
PackageSpec("Nanopb@0.4.7"), PackageSpec("Nanopb@0.4.8"),
] ]
assert config.get("env:devkit", "lib_deps") == [ assert config.get("env:devkit", "lib_deps") == [
"bblanchon/ArduinoJson@^5", "bblanchon/ArduinoJson@^5",

View File

@ -198,6 +198,7 @@ def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path):
assert pkgs_to_names(lm.get_installed()) == ["DallasTemperature", "OneWire"] assert pkgs_to_names(lm.get_installed()) == ["DallasTemperature", "OneWire"]
assert pkgs_to_names(ToolPackageManager().get_installed()) == [ assert pkgs_to_names(ToolPackageManager().get_installed()) == [
"framework-arduino-avr-attiny", "framework-arduino-avr-attiny",
"tool-scons",
"toolchain-atmelavr", "toolchain-atmelavr",
] ]
assert config.get("env:devkit", "lib_deps") == [ assert config.get("env:devkit", "lib_deps") == [
@ -224,7 +225,7 @@ def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path):
os.path.join(config.get("platformio", "libdeps_dir"), "devkit") os.path.join(config.get("platformio", "libdeps_dir"), "devkit")
) )
assert not pkgs_to_names(lm.get_installed()) assert not pkgs_to_names(lm.get_installed())
assert not pkgs_to_names(ToolPackageManager().get_installed()) assert pkgs_to_names(ToolPackageManager().get_installed()) == ["tool-scons"]
assert config.get("env:devkit", "lib_deps") == [ assert config.get("env:devkit", "lib_deps") == [
"milesburton/DallasTemperature@^3.9.1" "milesburton/DallasTemperature@^3.9.1"
] ]

View File

@ -16,7 +16,7 @@
import os import os
from platformio import fs from platformio import __core_packages__, fs
from platformio.package.commands.install import package_install_cmd from platformio.package.commands.install import package_install_cmd
from platformio.package.commands.update import package_update_cmd from platformio.package.commands.update import package_update_cmd
from platformio.package.exception import UnknownPackageError from platformio.package.exception import UnknownPackageError
@ -172,6 +172,7 @@ def test_project(
] ]
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
PackageSpec("framework-arduino-avr-attiny@1.3.2"), PackageSpec("framework-arduino-avr-attiny@1.3.2"),
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
PackageSpec("toolchain-atmelavr@1.50400.190710"), PackageSpec("toolchain-atmelavr@1.50400.190710"),
] ]
assert config.get("env:devkit", "lib_deps") == [ assert config.get("env:devkit", "lib_deps") == [
@ -201,6 +202,7 @@ def test_project(
] ]
assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ assert pkgs_to_specs(ToolPackageManager().get_installed()) == [
PackageSpec("framework-arduino-avr-attiny@1.3.2"), PackageSpec("framework-arduino-avr-attiny@1.3.2"),
PackageSpec("tool-scons@%s" % __core_packages__["tool-scons"][1:]),
PackageSpec("toolchain-atmelavr@1.70300.191015"), PackageSpec("toolchain-atmelavr@1.70300.191015"),
PackageSpec("toolchain-atmelavr@1.50400.190710"), PackageSpec("toolchain-atmelavr@1.50400.190710"),
] ]

View File

@ -540,6 +540,16 @@ int main() {
""" """
) )
if framework == "zephyr":
zephyr_dir = tmpdir.mkdir("zephyr")
zephyr_dir.join("prj.conf").write("# nothing here")
zephyr_dir.join("CMakeLists.txt").write(
"""cmake_minimum_required(VERSION 3.16.0)
find_package(Zephyr REQUIRED HINTS $ENV{ZEPHYR_BASE})
project(hello_world)
target_sources(app PRIVATE ../src/main.c)"""
)
tmpdir.join("platformio.ini").write(config) tmpdir.join("platformio.ini").write(config)
result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)]) result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
validate_cliresult(result) validate_cliresult(result)
@ -757,3 +767,39 @@ check_patterns =
assert errors + warnings + style == EXPECTED_DEFECTS * 2 assert errors + warnings + style == EXPECTED_DEFECTS * 2
assert "main.cpp" not in result.output assert "main.cpp" not in result.output
def test_check_src_filter_multiple_envs(clirunner, validate_cliresult, tmpdir_factory):
tmpdir = tmpdir_factory.mktemp("project")
config = """
[env]
check_tool = cppcheck
check_src_filters =
+<src/*>
[env:check_sources]
platform = native
[env:check_tests]
platform = native
check_src_filters =
+<test/*>
"""
tmpdir.join("platformio.ini").write(config)
src_dir = tmpdir.mkdir("src")
src_dir.join("main.cpp").write(TEST_CODE)
src_dir.mkdir("spi").join("spi.cpp").write(TEST_CODE)
tmpdir.mkdir("test").join("test.cpp").write(TEST_CODE)
result = clirunner.invoke(
cmd_check, ["--project-dir", str(tmpdir), "-e", "check_tests"]
)
validate_cliresult(result)
errors, warnings, style = count_defects(result.output)
assert errors + warnings + style == EXPECTED_DEFECTS
assert "test.cpp" in result.output
assert "main.cpp" not in result.output

View File

@ -651,4 +651,4 @@ def test_update_without_metadata(isolated_pio_core, tmpdir_factory):
lm.set_log_level(logging.ERROR) lm.set_log_level(logging.ERROR)
new_pkg = lm.update(pkg) new_pkg = lm.update(pkg)
assert len(lm.get_installed()) == 4 assert len(lm.get_installed()) == 4
assert new_pkg.metadata.spec.owner == "ottowinter" assert new_pkg.metadata.spec.owner == "heman"

View File

@ -33,7 +33,6 @@ BASE_CONFIG = """
[platformio] [platformio]
env_default = base, extra_2 env_default = base, extra_2
src_dir = ${custom.src_dir} src_dir = ${custom.src_dir}
build_dir = ${custom.build_dir}
extra_configs = extra_configs =
extra_envs.ini extra_envs.ini
extra_debug.ini extra_debug.ini
@ -61,7 +60,6 @@ build_flags = -D RELEASE
[custom] [custom]
src_dir = source src_dir = source
build_dir = ~/tmp/pio-$PROJECT_HASH
debug_flags = -D RELEASE debug_flags = -D RELEASE
lib_flags = -lc -lm lib_flags = -lc -lm
extra_flags = ${sysenv.__PIO_TEST_CNF_EXTRA_FLAGS} extra_flags = ${sysenv.__PIO_TEST_CNF_EXTRA_FLAGS}
@ -319,7 +317,6 @@ def test_getraw_value(config):
config.getraw("custom", "debug_server") config.getraw("custom", "debug_server")
== f"\n{packages_dir}/tool-openocd/openocd\n--help" == f"\n{packages_dir}/tool-openocd/openocd\n--help"
) )
assert config.getraw("platformio", "build_dir") == "~/tmp/pio-$PROJECT_HASH"
# renamed option # renamed option
assert config.getraw("env:extra_1", "lib_install") == "574" assert config.getraw("env:extra_1", "lib_install") == "574"
@ -360,7 +357,6 @@ def test_get_value(config):
assert config.get("platformio", "src_dir") == os.path.abspath( assert config.get("platformio", "src_dir") == os.path.abspath(
os.path.join(os.getcwd(), "source") os.path.join(os.getcwd(), "source")
) )
assert "$PROJECT_HASH" not in config.get("platformio", "build_dir")
# renamed option # renamed option
assert config.get("env:extra_1", "lib_install") == ["574"] assert config.get("env:extra_1", "lib_install") == ["574"]
@ -371,7 +367,6 @@ def test_get_value(config):
def test_items(config): def test_items(config):
assert config.items("custom") == [ assert config.items("custom") == [
("src_dir", "source"), ("src_dir", "source"),
("build_dir", "~/tmp/pio-$PROJECT_HASH"),
("debug_flags", "-D DEBUG=1"), ("debug_flags", "-D DEBUG=1"),
("lib_flags", "-lc -lm"), ("lib_flags", "-lc -lm"),
("extra_flags", ""), ("extra_flags", ""),
@ -525,7 +520,6 @@ def test_dump(tmpdir_factory):
[ [
("env_default", ["base", "extra_2"]), ("env_default", ["base", "extra_2"]),
("src_dir", "${custom.src_dir}"), ("src_dir", "${custom.src_dir}"),
("build_dir", "${custom.build_dir}"),
("extra_configs", ["extra_envs.ini", "extra_debug.ini"]), ("extra_configs", ["extra_envs.ini", "extra_debug.ini"]),
], ],
), ),
@ -549,7 +543,6 @@ def test_dump(tmpdir_factory):
"custom", "custom",
[ [
("src_dir", "source"), ("src_dir", "source"),
("build_dir", "~/tmp/pio-$PROJECT_HASH"),
("debug_flags", "-D RELEASE"), ("debug_flags", "-D RELEASE"),
("lib_flags", "-lc -lm"), ("lib_flags", "-lc -lm"),
("extra_flags", "${sysenv.__PIO_TEST_CNF_EXTRA_FLAGS}"), ("extra_flags", "${sysenv.__PIO_TEST_CNF_EXTRA_FLAGS}"),
@ -631,26 +624,66 @@ custom_option = ${this.board}
assert config.get("env:myenv", "build_flags") == ["-Dmyenv"] assert config.get("env:myenv", "build_flags") == ["-Dmyenv"]
def test_project_name(tmp_path: Path):
project_dir = tmp_path / "my-project-name"
project_dir.mkdir()
project_conf = project_dir / "platformio.ini"
project_conf.write_text(
"""
[env:myenv]
"""
)
with fs.cd(str(project_dir)):
config = ProjectConfig(str(project_conf))
assert config.get("platformio", "name") == "my-project-name"
# custom name
project_conf.write_text(
"""
[platformio]
name = custom-project-name
"""
)
config = ProjectConfig(str(project_conf))
assert config.get("platformio", "name") == "custom-project-name"
def test_nested_interpolation(tmp_path: Path): def test_nested_interpolation(tmp_path: Path):
project_conf = tmp_path / "platformio.ini" project_conf = tmp_path / "platformio.ini"
project_conf.write_text( project_conf.write_text(
""" """
[platformio] [platformio]
build_dir = ~/tmp/pio-$PROJECT_HASH build_dir = /tmp/pio-$PROJECT_HASH
data_dir = $PROJECT_DIR/assets
[env:myenv] [env:myenv]
build_flags =
-D UTIME=${UNIX_TIME}
-I ${PROJECTSRC_DIR}/hal
-Wl,-Map,${BUILD_DIR}/${PROGNAME}.map
test_testing_command = test_testing_command =
${platformio.packages_dir}/tool-simavr/bin/simavr ${platformio.packages_dir}/tool-simavr/bin/simavr
-m -m
atmega328p atmega328p
-f -f
16000000L 16000000L
${UPLOAD_PORT and "-p "+UPLOAD_PORT}
${platformio.build_dir}/${this.__env__}/firmware.elf ${platformio.build_dir}/${this.__env__}/firmware.elf
""" """
) )
config = ProjectConfig(str(project_conf)) config = ProjectConfig(str(project_conf))
assert config.get("platformio", "data_dir").endswith(
os.path.join("$PROJECT_DIR", "assets")
)
assert config.get("env:myenv", "build_flags")[0][-10:].isdigit()
assert config.get("env:myenv", "build_flags")[1] == "-I ${PROJECTSRC_DIR}/hal"
assert (
config.get("env:myenv", "build_flags")[2]
== "-Wl,-Map,${BUILD_DIR}/${PROGNAME}.map"
)
testing_command = config.get("env:myenv", "test_testing_command") testing_command = config.get("env:myenv", "test_testing_command")
assert "$" not in " ".join(testing_command) assert "$" not in testing_command[0]
assert testing_command[5] == '${UPLOAD_PORT and "-p "+UPLOAD_PORT}'
def test_extends_order(tmp_path: Path): def test_extends_order(tmp_path: Path):
@ -707,11 +740,16 @@ def test_linting_warnings(tmp_path: Path):
project_conf = tmp_path / "platformio.ini" project_conf = tmp_path / "platformio.ini"
project_conf.write_text( project_conf.write_text(
""" """
[platformio]
build_dir = /tmp/pio-$PROJECT_HASH
[env:app1] [env:app1]
lib_use = 1 lib_use = 1
test_testing_command = /usr/bin/flash-tool -p $UPLOAD_PORT -b $UPLOAD_SPEED
""" """
) )
result = ProjectConfig.lint(str(project_conf)) result = ProjectConfig.lint(str(project_conf))
assert not result["errors"] assert not result["errors"]
assert result["warnings"] and len(result["warnings"]) == 1 assert result["warnings"] and len(result["warnings"]) == 2
assert "deprecated" in result["warnings"][0] assert "deprecated" in result["warnings"][0]
assert "Invalid variable declaration" in result["warnings"][1]

View File

@ -0,0 +1,82 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from platformio.project.commands.metadata import project_metadata_cmd
def test_metadata_dump(clirunner, validate_cliresult, tmpdir):
tmpdir.join("platformio.ini").write(
"""
[env:native]
platform = native
"""
)
component_dir = tmpdir.mkdir("lib").mkdir("component")
component_dir.join("library.json").write(
"""
{
"name": "component",
"version": "1.0.0"
}
"""
)
component_dir.mkdir("include").join("component.h").write(
"""
#define I_AM_COMPONENT
void dummy(void);
"""
)
component_dir.mkdir("src").join("component.cpp").write(
"""
#include <component.h>
void dummy(void ) {};
"""
)
tmpdir.mkdir("src").join("main.c").write(
"""
#include <component.h>
#ifndef I_AM_COMPONENT
#error "I_AM_COMPONENT"
#endif
int main() {
}
"""
)
metadata_path = tmpdir.join("metadata.json")
result = clirunner.invoke(
project_metadata_cmd,
[
"--project-dir",
str(tmpdir),
"-e",
"native",
"--json-output",
"--json-output-path",
str(metadata_path),
],
)
validate_cliresult(result)
with open(str(metadata_path), encoding="utf8") as fp:
metadata = json.load(fp)["native"]
assert len(metadata["includes"]["build"]) == 3
assert len(metadata["includes"]["compatlib"]) == 2
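The new test drives the project metadata command with --json-output-path and checks the include paths recorded for the native environment. A short sketch of consuming the dumped file, mirroring the assertions above:

    import json

    with open("metadata.json", encoding="utf8") as fp:
        metadata = json.load(fp)["native"]  # keyed by environment name

    print(metadata["includes"]["build"])      # three entries expected above
    print(metadata["includes"]["compatlib"])  # two entries expected above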

View File

@ -55,7 +55,7 @@ commands =
[testenv:docs] [testenv:docs]
deps = deps =
sphinx sphinx
sphinx-rtd-theme==1.2.2 sphinx-rtd-theme==2.0.0
sphinx-notfound-page sphinx-notfound-page
sphinx-copybutton sphinx-copybutton
restructuredtext-lint restructuredtext-lint