Mirror of https://github.com/platformio/platformio-core.git
Synced 2025-06-25 17:41:54 +02:00

Compare commits
51 commits (SHA1):

444c57b4a6  d787648e71  846588deec  79142965ce  93bc4fae6c  1aa256d63c
3a133af1a6  f93d3d509b  145142ea6c  b4b02982d6  841489c154  23c142dffd
fc946baa93  a447022e7f  4c697d9032  a71443a2ee  20e076191e  d907ecb9e9
c950d6d366  29cd2d2bdb  a584a6bce3  4dc7ea5bd0  1be6e10f99  c9016d6939
baab25a48c  4d4f5a217b  b6d1f4d769  90fc36cf2d  9be0a8248d  d15314689d
1d4b5c8051  47a87c57f2  ec2d01f277  4e05309e02  1fd3a4061f  014ac79c87
dd3fe909a1  c1afb364e9  f3c27eadf6  fe2fd5e880  07e7dc4717  a94e5bd5ab
f5ab0e5ddd  3e20abec90  a4276b4ea6  cade63fba5  3a57661230  33fadd028d
647b131d9b  b537004a75  fe2e8a0a40
.github/workflows/core.yml (vendored, 8 changed lines)

@@ -8,7 +8,7 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
-       python-version: ["3.11", "3.12", "3.13.0-rc.2"]
+       python-version: ["3.11", "3.12", "3.13"]

    runs-on: ${{ matrix.os }}

@@ -27,6 +27,12 @@ jobs:
          python -m pip install --upgrade pip
          pip install tox

+     - name: Run "codespell" on Linux
+       if: startsWith(matrix.os, 'ubuntu')
+       run: |
+         python -m pip install codespell
+         make codespell
+
      - name: Core System Info
        run: |
          tox -e py
.github/workflows/deployment.yml (vendored, 7 changed lines)

@@ -24,7 +24,7 @@ jobs:
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
-         pip install tox wheel
+         pip install tox build

      - name: Deployment Tests
        env:
@@ -34,9 +34,8 @@ jobs:
        run: |
          tox -e testcore

-     - name: Build Python source tarball
-       # run: python setup.py sdist bdist_wheel
-       run: python setup.py sdist
+     - name: Build Python distributions
+       run: python -m build

      - name: Publish package to PyPI
        if: ${{ github.ref == 'refs/heads/master' }}
.github/workflows/docs.yml (vendored, 10 changed lines)

@@ -11,7 +11,7 @@ jobs:
        with:
          submodules: "recursive"
      - name: Set up Python
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install dependencies
@@ -40,7 +40,7 @@ jobs:

      - name: Save artifact
        if: ${{ github.event_name == 'push' }}
-       uses: actions/upload-artifact@v3
+       uses: actions/upload-artifact@v4
        with:
          name: docs
          path: ./docs.tar.gz
@@ -57,7 +57,7 @@ jobs:
    if: ${{ github.event_name == 'push' }}
    steps:
      - name: Download artifact
-       uses: actions/download-artifact@v3
+       uses: actions/download-artifact@v4
        with:
          name: docs
      - name: Unpack artifact
@@ -65,7 +65,7 @@ jobs:
          mkdir ./${{ env.LATEST_DOCS_DIR }}
          tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
      - name: Delete Artifact
-       uses: geekyeggo/delete-artifact@v2
+       uses: geekyeggo/delete-artifact@v5
        with:
          name: docs
      - name: Select Docs type
@@ -101,7 +101,7 @@ jobs:
            exit 1
          fi
      - name: Deploy to Github Pages
-       uses: peaceiris/actions-gh-pages@v3
+       uses: peaceiris/actions-gh-pages@v4
        with:
          personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
          external_repository: ${{ env.DOCS_REPO }}
.github/workflows/examples.yml (vendored, 2 changed lines)

@@ -34,7 +34,7 @@ jobs:
        run: |
          # Free space
          sudo apt clean
-         docker rmi $(docker image ls -aq)
+         # docker rmi $(docker image ls -aq)
          df -h
          tox -e testexamples
HISTORY.rst (24 changed lines)

@@ -8,6 +8,7 @@ Release Notes
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
.. |STATICCODEANALYSIS| replace:: `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__
+.. |PIOHOME| replace:: `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__

.. _release_notes_6:

@@ -18,6 +19,27 @@ Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.

+6.1.19 (2025-??-??)
+~~~~~~~~~~~~~~~~~~~
+
+* Fixed a regression issue where custom build flags were not properly reflected in the `compile_commands.json <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ file, ensuring accurate compilation database generation
+
+6.1.18 (2025-03-11)
+~~~~~~~~~~~~~~~~~~~
+
+* Resolved a regression issue that prevented |PIOHOME| from opening external links (`issue #5084 <https://github.com/platformio/platformio-core/issues/5084>`_)
+
+6.1.17 (2025-02-13)
+~~~~~~~~~~~~~~~~~~~
+
+* Introduced the `PLATFORMIO_RUN_JOBS <https://docs.platformio.org/en/latest/envvars.html#envvar-PLATFORMIO_RUN_JOBS>`__ environment variable, allowing manual override of the number of parallel build jobs (`issue #5077 <https://github.com/platformio/platformio-core/issues/5077>`_)
+* Added support for ``tar.xz`` tarball dependencies (`pull #4974 <https://github.com/platformio/platformio-core/pull/4974>`_)
+* Ensured that dependencies of private libraries are no longer unnecessarily re-installed, optimizing dependency management and reducing redundant operations (`issue #4987 <https://github.com/platformio/platformio-core/issues/4987>`_)
+* Resolved an issue where the ``compiledb`` target failed to properly escape compiler executable paths containing spaces (`issue #4998 <https://github.com/platformio/platformio-core/issues/4998>`_)
+* Resolved an issue with incorrect path resolution when linking static libraries via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option (`issue #5004 <https://github.com/platformio/platformio-core/issues/5004>`_)
+* Resolved an issue where the ``--project-dir`` flag did not function correctly with the `pio check <https://docs.platformio.org/en/latest/core/userguide/cmd_check.html>`__ and `pio debug <https://docs.platformio.org/en/latest/core/userguide/cmd_debug.html>`__ commands (`issue #5029 <https://github.com/platformio/platformio-core/issues/5029>`_)
+* Resolved an issue where the |LDF| occasionally excluded bundled platform libraries from the dependency graph (`pull #4941 <https://github.com/platformio/platformio-core/pull/4941>`_)
+
 6.1.16 (2024-09-26)
 ~~~~~~~~~~~~~~~~~~~

@@ -76,7 +98,7 @@ test-driven methodologies, and modern toolchains for unrivaled success.
~~~~~~~~~~~~~~~~~~~

* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package
-* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
+* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within |PIOHOME|

6.1.10 (2023-08-11)
~~~~~~~~~~~~~~~~~~~
Makefile (5 changed lines)

@@ -10,10 +10,13 @@ format:
	black ./platformio
	black ./tests

+codespell:
+	codespell --skip "./build,./docs/_build" -L "AtLeast,TRE,ans,dout,homestate,ser"
+
test:
	pytest --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

-before-commit: isort format lint
+before-commit: codespell isort format lint

clean-docs:
	rm -rf docs/_build
docs (submodule)
Submodule docs updated: dd3d549bdb...70ab7ee27b

examples (submodule)
Submodule examples updated: 2585734bbf...0409a90a01
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-VERSION = (6, 1, 16)
+VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
@@ -147,13 +147,13 @@ if env.subst("$BUILD_CACHE_DIR"):
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
    click.echo("Verbose mode can be enabled via `-v, --verbose` option")

-if not os.path.isdir(env.subst("$BUILD_DIR")):
-    os.makedirs(env.subst("$BUILD_DIR"))
-
# Dynamically load dependent tools
if "compiledb" in COMMAND_LINE_TARGETS:
    env.Tool("compilation_db")

+if not os.path.isdir(env.subst("$BUILD_DIR")):
+    os.makedirs(env.subst("$BUILD_DIR"))
+
env.LoadProjectOptions()
env.LoadPioPlatform()
@@ -59,6 +59,7 @@ def GetBuildType(env):

def BuildProgram(env):
    env.ProcessProgramDeps()
+    env.ProcessCompileDbToolchainOption()
    env.ProcessProjectDeps()

    # append into the beginning a main LD script
@@ -126,26 +127,26 @@ def ProcessProgramDeps(env):
    # remove specified flags
    env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))

-    env.ProcessCompileDbToolchainOption()
-

def ProcessCompileDbToolchainOption(env):
-    if "compiledb" in COMMAND_LINE_TARGETS:
-        # Resolve absolute path of toolchain
-        for cmd in ("CC", "CXX", "AS"):
-            if cmd not in env:
-                continue
-            if os.path.isabs(env[cmd]):
-                continue
-            env[cmd] = where_is_program(
-                env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
-            )
+    if "compiledb" not in COMMAND_LINE_TARGETS:
+        return

-    if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
-        print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
-        for scope, includes in env.DumpIntegrationIncludes().items():
-            if scope in ("toolchain",):
-                env.Append(CPPPATH=includes)
+    # Resolve absolute path of toolchain
+    for cmd in ("CC", "CXX", "AS"):
+        if cmd not in env:
+            continue
+        if os.path.isabs(env[cmd]) or '"' in env[cmd]:
+            continue
+        env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
+        if " " in env[cmd]:  # issue #4998: Space in compilator path
+            env[cmd] = f'"{env[cmd]}"'
+
+    if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
+        print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
+        for scope, includes in env.DumpIntegrationIncludes().items():
+            if scope in ("toolchain",):
+                env.Append(CPPPATH=includes)


def ProcessProjectDeps(env):
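The hunk above is the fix for issue #4998: when a compiler executable lives in a directory containing spaces, an unquoted entry breaks the commands written to compile_commands.json. A minimal, self-contained sketch of the same quoting idea (not PlatformIO's actual code; shutil.which stands in for its where_is_program helper):

    import os
    import shutil

    def resolve_tool_for_compiledb(cmd):
        """Return a compile_commands.json-safe executable reference (illustrative)."""
        if os.path.isabs(cmd) or '"' in cmd:
            return cmd  # already absolute or already quoted
        resolved = shutil.which(cmd) or cmd  # stand-in for where_is_program()
        if " " in resolved:  # an unquoted space would split the command string
            resolved = f'"{resolved}"'
        return resolved

    print(resolve_tool_for_compiledb("gcc"))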
@@ -219,6 +220,11 @@ def ParseFlagsExtended(env, flags):  # pylint: disable=too-many-branches
            if os.path.isdir(p):
                result[k][i] = os.path.abspath(p)

+    # fix relative LIBs
+    for i, l in enumerate(result.get("LIBS", [])):
+        if isinstance(l, FS.File):
+            result["LIBS"][i] = os.path.abspath(l.get_path())
+
    # fix relative path for "-include"
    for i, f in enumerate(result.get("CCFLAGS", [])):
        if isinstance(f, tuple) and f[0] == "-include":
@@ -1159,6 +1159,8 @@ def ConfigureProjectLibBuilder(env):
    for lb in lib_builders:
        if lb in found_lbs:
            lb.search_deps_recursive(lb.get_search_files())
+    # refill found libs after recursive search
+    found_lbs = [lb for lb in lib_builders if lb.is_dependent]
    for lb in lib_builders:
        for deplb in lb.depbuilders[:]:
            if deplb not in found_lbs:
@@ -19,7 +19,6 @@ import json
import os
import shutil
from collections import Counter
-from os.path import dirname, isfile
from time import time

import click
@@ -77,7 +76,7 @@ def cli(  # pylint: disable=too-many-positional-arguments
        app.set_session_var("custom_project_conf", project_conf)

    # find project directory on upper level
-    if isfile(project_dir):
+    if os.path.isfile(project_dir):
        project_dir = find_project_dir_above(project_dir)

    results = []
@@ -150,7 +149,7 @@ def cli(  # pylint: disable=too-many-positional-arguments
            print_processing_header(tool, envname, env_dump)

            ct = CheckToolFactory.new(
-                tool, project_dir, config, envname, tool_options
+                tool, os.getcwd(), config, envname, tool_options
            )

            result = {"env": envname, "tool": tool, "duration": time()}
@@ -250,12 +249,12 @@ def collect_component_stats(result):
        components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})

    for defect in result.get("defects", []):
-        component = dirname(defect.file) or defect.file
+        component = os.path.dirname(defect.file) or defect.file
        _append_defect(component, defect)

        if component.lower().startswith(get_project_dir().lower()):
            while os.sep in component:
-                component = dirname(component)
+                component = os.path.dirname(component)
                _append_defect(component, defect)

    return components
@@ -76,5 +76,5 @@ def settings_set(ctx, name, value):
@click.pass_context
def settings_reset(ctx):
    app.reset_settings()
-    click.secho("The settings have been reseted!", fg="green")
+    click.secho("The settings have been reset!", fg="green")
    ctx.invoke(settings_get)
@@ -86,7 +86,7 @@ def cli(  # pylint: disable=too-many-positional-arguments

    if not interface:
        return helpers.predebug_project(
-            ctx, project_dir, project_config, env_name, False, verbose
+            ctx, os.getcwd(), project_config, env_name, False, verbose
        )

    configure_args = (
@@ -106,7 +106,7 @@ def cli(  # pylint: disable=too-many-positional-arguments
    else:
        debug_config = _configure(*configure_args)

-    _run(project_dir, debug_config, client_extra_args)
+    _run(os.getcwd(), debug_config, client_extra_args)

    return None
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-import platform
-
from platformio.compat import is_proxy_set


@@ -30,9 +28,8 @@ def get_core_dependencies():

def get_pip_dependencies():
    core = [
-        'bottle == 0.12.*; python_version < "3.7"',
-        'bottle == 0.13.*; python_version >= "3.7"',
-        "click >=8.0.4, <9",
+        "bottle == 0.13.*",
+        "click >=8.0.4, <8.1.8",
        "colorama",
        "marshmallow == 3.*",
        "pyelftools >=0.27, <1",
@@ -45,19 +42,16 @@ def get_pip_dependencies():
    home = [
        # PIO Home requirements
        "ajsonrpc == 1.2.*",
-        "starlette >=0.19, <0.40",
-        'uvicorn == 0.16.0; python_version < "3.7"',
-        'uvicorn >=0.16, <0.31; python_version >= "3.7"',
+        "starlette >=0.19, <0.47",
+        "uvicorn >=0.16, <0.35",
        "wsproto == 1.*",
    ]

    extra = []

    # issue #4702; Broken "requests/charset_normalizer" on macOS ARM
-    if platform.system() == "Darwin" and "arm" in platform.machine().lower():
-        extra.append(
-            'chardet >= 3.0.2,<6; platform_system == "Darwin" and "arm" in platform_machine'
-        )
+    extra.append(
+        'chardet >= 3.0.2,<6; platform_system == "Darwin" and "arm" in platform_machine'
+    )

    # issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
    try:
@@ -54,7 +54,7 @@ def package_exec_cmd(obj, package, call, args):
    os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path()

    # inject current python interpreter on Windows
-    if args[0].endswith(".py"):
+    if args and args[0].endswith(".py"):
        args = [os.environ["PIO_PYTHON_EXE"]] + list(args)
        if not os.path.exists(args[1]):
            args[1] = where_is_program(args[1])
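The one-line change above guards the forwarded argument list against being empty: indexing args[0] before checking args raises IndexError when nothing follows the command. A tiny illustration of the pattern (the helper name is made up):

    def first_arg_is_python_script(args):
        """True when the forwarded command starts with a .py file; safe for empty lists."""
        # Evaluating `args` first short-circuits and avoids IndexError on an empty list.
        return bool(args) and args[0].endswith(".py")

    print(first_arg_is_python_script([]))                  # False instead of IndexError
    print(first_arg_is_python_script(["script.py", "-h"])) # True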
@@ -297,7 +297,11 @@ def _install_project_private_library_deps(private_pkg, private_lm, env_lm, options):
        if not spec.external and not spec.owner:
            continue
        pkg = private_lm.get_package(spec)
-        if not pkg and not env_lm.get_package(spec):
+        if (
+            not pkg
+            and not private_lm.get_package(spec)
+            and not env_lm.get_package(spec)
+        ):
            pkg = env_lm.install(
                spec,
                skip_dependencies=True,
@@ -111,7 +111,7 @@ def uninstall_project_env_dependencies(project_env, options=None):
        uninstalled_conds.append(
            _uninstall_project_env_custom_tools(project_env, options)
        )
-    # custom ibraries
+    # custom libraries
    if options.get("libraries"):
        uninstalled_conds.append(
            _uninstall_project_env_custom_libraries(project_env, options)
@@ -110,7 +110,7 @@ def update_project_env_dependencies(project_env, options=None):
    # custom tools
    if options.get("tools"):
        updated_conds.append(_update_project_env_custom_tools(project_env, options))
-    # custom ibraries
+    # custom libraries
    if options.get("libraries"):
        updated_conds.append(_update_project_env_custom_libraries(project_env, options))
    # declared dependencies
@@ -34,7 +34,7 @@ class FileDownloader:
            url,
            stream=True,
        )
-        if self._http_response.status_code != 200:
+        if self._http_response.status_code not in (200, 203):
            raise PackageException(
                "Got the unrecognized status code '{0}' when downloaded {1}".format(
                    self._http_response.status_code, url
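The widened check above additionally accepts HTTP 203 (Non-Authoritative Information), which some caching or transforming proxies return for an otherwise successful download. A rough sketch of the same acceptance rule, assuming the requests library (the function name is made up for illustration):

    import requests

    ACCEPTED_STATUSES = (200, 203)  # 203 can be returned by transforming proxies

    def open_download(url):
        """Start a streaming download and reject anything but an accepted status code."""
        response = requests.get(url, stream=True, timeout=30)
        if response.status_code not in ACCEPTED_STATUSES:
            raise RuntimeError(
                f"Unrecognized status code '{response.status_code}' when downloading {url}"
            )
        return response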
@@ -276,7 +276,7 @@ class ManifestSchema(BaseSchema):
    @staticmethod
    @memoized(expire="1h")
    def load_spdx_licenses():
-        version = "3.24.0"
+        version = "3.26.0"
        spdx_data_url = (
            "https://raw.githubusercontent.com/spdx/license-list-data/"
            f"v{version}/json/licenses.json"
@@ -396,7 +396,7 @@ class PackageSpec:  # pylint: disable=too-many-instance-attributes
            parts.path.endswith(".git"),
            # Handle GitHub URL (https://github.com/user/package)
            parts.netloc in ("github.com", "gitlab.com", "bitbucket.com")
-            and not parts.path.endswith((".zip", ".tar.gz")),
+            and not parts.path.endswith((".zip", ".tar.gz", ".tar.xz")),
        ]
        hg_conditions = [
            # Handle Developer Mbed URL
@@ -152,6 +152,7 @@ class FileUnpacker:
    magic_map = {
        b"\x1f\x8b\x08": TARArchiver,
        b"\x42\x5a\x68": TARArchiver,
+       b"\xfd\x37\x7a\x58\x5a\x00": TARArchiver,
        b"\x50\x4b\x03\x04": ZIPArchiver,
    }
    magic_len = max(len(k) for k in magic_map)
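The new entry above is the xz signature (fd 37 7a 58 5a 00) that lets the unpacker route tar.xz archives to the tar handler (pull #4974). A small standalone sketch of magic-byte detection, with made-up labels instead of the real archiver classes:

    MAGIC_MAP = {
        b"\x1f\x8b\x08": "tar.gz",
        b"\x42\x5a\x68": "tar.bz2",
        b"\xfd\x37\x7a\x58\x5a\x00": "tar.xz",
        b"\x50\x4b\x03\x04": "zip",
    }
    MAGIC_LEN = max(len(k) for k in MAGIC_MAP)

    def detect_archive(path):
        """Guess the archive family from the first few bytes of a file."""
        with open(path, "rb") as fp:
            head = fp.read(MAGIC_LEN)
        for magic, kind in MAGIC_MAP.items():
            if head.startswith(magic):
                return kind
        return "unknown"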
@@ -44,7 +44,7 @@ def cast_version_to_semver(value, force=True, raise_exception=False):


def pepver_to_semver(pepver):
    return cast_version_to_semver(
-        re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)
+        re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, count=1)
    )
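The change above only switches re.sub to the count= keyword, since passing count positionally is deprecated as of Python 3.13; the conversion itself is unchanged. A hedged sketch of what the substitution does to PEP 440 pre-release versions:

    import re

    def pep440_prerelease_to_semver(version):
        """Rewrite the first PEP 440 pre-release tag into semver's hyphenated form (illustrative)."""
        return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", version, count=1)

    print(pep440_prerelease_to_semver("6.1.19a2"))    # 6.1.19-a.2
    print(pep440_prerelease_to_semver("1.2.3.dev4"))  # 1.2.3-dev.4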
@@ -33,7 +33,7 @@ class PlatformFactory:

    @staticmethod
    def load_platform_module(name, path):
-        # backward compatibiility with the legacy dev-platforms
+        # backward compatibility with the legacy dev-platforms
        sys.modules["platformio.managers.platform"] = base
        try:
            return load_python_module("platformio.platform.%s" % name, path)
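The comment fix above sits next to the backward-compatibility trick the function relies on: registering a module object in sys.modules under a legacy dotted name so that old imports still resolve. A self-contained sketch with made-up names:

    import importlib
    import json
    import sys

    # Expose an existing module under a legacy import path (names are hypothetical).
    sys.modules["legacy_app.serialization"] = json

    legacy = importlib.import_module("legacy_app.serialization")
    print(legacy.dumps({"still": "importable"}))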
@@ -201,9 +201,7 @@ new version when next recompiled. The header file eliminates the labor of
finding and changing all the copies as well as the risk that a failure to
find one copy will result in inconsistencies within a program.

-In C, the usual convention is to give header files names that end with `.h'.
-It is most portable to use only letters, digits, dashes, and underscores in
-header file names, and at most one dot.
+In C, the convention is to give header files names that end with `.h'.

Read more about using header files in official GCC documentation:

@@ -222,12 +220,12 @@ def init_lib_readme(lib_dir):
    fp.write(
        """
This directory is intended for project specific (private) libraries.
-PlatformIO will compile them to static libraries and link into executable file.
+PlatformIO will compile them to static libraries and link into the executable file.

-The source code of each library should be placed in an own separate directory
-("lib/your_library_name/[here are source files]").
+The source code of each library should be placed in a separate directory
+("lib/your_library_name/[Code]").

-For example, see a structure of the following two libraries `Foo` and `Bar`:
+For example, see the structure of the following example libraries `Foo` and `Bar`:

|--lib
|  |
@@ -237,7 +235,7 @@ For example, see a structure of the following two libraries `Foo` and `Bar`:
|  |  |--src
|  |     |- Bar.c
|  |     |- Bar.h
-|  |     |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
+|  |     |- library.json (optional. for custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|  |
|  |--Foo
|  |  |- Foo.c
@@ -249,7 +247,7 @@ For example, see a structure of the following two libraries `Foo` and `Bar`:
|--src
   |- main.c

-and a contents of `src/main.c`:
+Example contents of `src/main.c` using Foo and Bar:
```
#include <Foo.h>
#include <Bar.h>
@@ -261,8 +259,8 @@ int main (void)

```

-PlatformIO Library Dependency Finder will find automatically dependent
-libraries scanning project source files.
+The PlatformIO Library Dependency Finder will find automatically dependent
+libraries by scanning project source files.

More information about PlatformIO Library Dependency Finder
- https://docs.platformio.org/page/librarymanager/ldf.html
@@ -347,7 +347,7 @@ class ProjectConfigBase:
        if section is None:
            if option in self.BUILTIN_VARS:
                return self.BUILTIN_VARS[option]()
-            # SCons varaibles
+            # SCons variables
            return f"${{{option}}}"

        # handle system environment variables
@@ -158,7 +158,7 @@ def load_build_metadata(project_dir, env_or_envs, cache=False, build_type=None):
    return result or None


-# Backward compatibiility with dev-platforms
+# Backward compatibility with dev-platforms
load_project_ide_data = load_build_metadata
@@ -17,7 +17,7 @@
# common.symbolFiles=<Symbol Files loaded by debugger>
# (This value is overwritten by a launcher specific symbolFiles value if the latter exists)
#
-# In runDir, symbolFiles and env fields you can use these macroses:
+# In runDir, symbolFiles and env fields you can use these macros:
# ${PROJECT_DIR} - project directory absolute path
# ${OUTPUT_PATH} - linker output path (relative to project directory path)
# ${OUTPUT_BASENAME}- linker output filename
@@ -33,9 +33,11 @@ from platformio.test.runners.base import CTX_META_TEST_IS_RUNNING

# pylint: disable=too-many-arguments,too-many-locals,too-many-branches

try:
-    DEFAULT_JOB_NUMS = cpu_count()
+    SYSTEM_CPU_COUNT = cpu_count()
except NotImplementedError:
-    DEFAULT_JOB_NUMS = 1
+    SYSTEM_CPU_COUNT = 1

+DEFAULT_JOB_NUMS = int(os.getenv("PLATFORMIO_RUN_JOBS", SYSTEM_CPU_COUNT))
+

@click.command("run", short_help="Run project targets (build, upload, clean, etc.)")
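The hunk above introduces the PLATFORMIO_RUN_JOBS override described in the 6.1.17 release notes: the environment variable, when set, wins over the detected CPU count. A minimal sketch of that resolution order (helper name is made up):

    import os
    from multiprocessing import cpu_count

    def default_run_jobs():
        """Parallel job count: PLATFORMIO_RUN_JOBS, if set, overrides the CPU count."""
        try:
            system_cpu_count = cpu_count()
        except NotImplementedError:  # cpu_count() can be unavailable on some platforms
            system_cpu_count = 1
        return int(os.getenv("PLATFORMIO_RUN_JOBS", system_cpu_count))

    print(default_run_jobs())              # detected CPU count
    os.environ["PLATFORMIO_RUN_JOBS"] = "2"
    print(default_run_jobs())              # 2, regardless of the hardware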
@@ -65,16 +65,16 @@ class memoized:


class throttle:
-    def __init__(self, threshhold):
-        self.threshhold = threshhold  # milliseconds
+    def __init__(self, threshold):
+        self.threshold = threshold  # milliseconds
        self.last = 0

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            diff = int(round((time.time() - self.last) * 1000))
-            if diff < self.threshhold:
-                time.sleep((self.threshhold - diff) * 0.001)
+            if diff < self.threshold:
+                time.sleep((self.threshold - diff) * 0.001)
            self.last = time.time()
            return func(*args, **kwargs)
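For context, the renamed attribute belongs to a decorator that enforces a minimum interval between calls. Below is the corrected class reassembled with a small usage example; the trailing "return wrapper" is implied by the surrounding code but not visible in the hunk:

    import functools
    import time

    class throttle:
        """Decorator: keep at least `threshold` milliseconds between invocations."""

        def __init__(self, threshold):
            self.threshold = threshold  # milliseconds
            self.last = 0

        def __call__(self, func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                diff = int(round((time.time() - self.last) * 1000))
                if diff < self.threshold:
                    time.sleep((self.threshold - diff) * 0.001)
                self.last = time.time()
                return func(*args, **kwargs)

            return wrapper

    @throttle(200)
    def poll():
        print("polled at", time.time())

    for _ in range(3):
        poll()  # calls are spaced at least ~200 ms apart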
@@ -31,7 +31,7 @@ PROJECT_CONFIG_TPL = """
[env]
platform = platformio/atmelavr@^3.4.0
lib_deps =
-    milesburton/DallasTemperature@^3.9.1
+    milesburton/DallasTemperature@^4.0.4
    https://github.com/esphome/ESPAsyncWebServer/archive/refs/tags/v2.1.0.zip

[env:baremetal]
@@ -215,7 +215,7 @@ def test_project(
        PackageSpec("toolchain-atmelavr@1.70300.191015"),
    ]
    assert config.get("env:devkit", "lib_deps") == [
-        "milesburton/DallasTemperature@^3.9.1",
+        "milesburton/DallasTemperature@^4.0.4",
        "https://github.com/esphome/ESPAsyncWebServer/archive/refs/tags/v2.1.0.zip",
    ]

@@ -241,7 +241,7 @@ def test_private_lib_deps(
    "version": "1.0.0",
    "dependencies": {
        "bblanchon/ArduinoJson": "^5",
-        "milesburton/DallasTemperature": "^3.9.1"
+        "milesburton/DallasTemperature": "^4.0.4"
    }
}
"""
@@ -340,7 +340,7 @@ def test_remove_project_unused_libdeps(
        ),
    ]

-    # manually remove from cofiguration file
+    # manually remove from configuration file
    config.set("env:baremetal", "lib_deps", ["bblanchon/ArduinoJson@^5"])
    config.save()
    result = clirunner.invoke(
@@ -446,7 +446,7 @@ def test_custom_project_libraries(
    )
    assert pkgs_to_specs(lm.get_installed()) == [
        PackageSpec("ArduinoJson@5.13.4"),
-        PackageSpec("Nanopb@0.4.9"),
+        PackageSpec("Nanopb@0.4.91"),
    ]
    assert config.get("env:devkit", "lib_deps") == [
        "bblanchon/ArduinoJson@^5",
@@ -56,7 +56,7 @@ def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path):
        re.MULTILINE,
    )
    assert re.search(
-        r"^DallasTemperature\s+3\.\d\.1\s+3\.\d+\.\d+\s+3\.\d+\.\d+\s+Library\s+devkit",
+        r"^DallasTemperature\s+3\.\d\.1\s+3\.\d+\.\d+\s+4\.\d+\.\d+\s+Library\s+devkit",
        result.output,
        re.MULTILINE,
    )
@@ -58,12 +58,14 @@ def test_global_packages(
    validate_cliresult(result)
    assert pkgs_to_names(LibraryPackageManager().get_installed()) == [
        "ArduinoJson",
        "Async TCP",
        "AsyncMqttClient",
        "AsyncTCP",
        "AsyncTCP_RP2040W",
        "Bounce2",
        "ESP Async WebServer",
        "ESPAsyncTCP",
        "ESPAsyncTCP",
        "ESPAsyncTCP-esphome",
        "Homie",
    ]
    # uninstall all deps
@@ -96,12 +98,14 @@ def test_global_packages(
    validate_cliresult(result)
    assert pkgs_to_names(LibraryPackageManager().get_installed()) == [
        "ArduinoJson",
        "Async TCP",
        "AsyncMqttClient",
        "AsyncTCP",
        "AsyncTCP_RP2040W",
        "Bounce2",
        "ESP Async WebServer",
        "ESPAsyncTCP",
        "ESPAsyncTCP",
        "ESPAsyncTCP-esphome",
    ]
    # remove specific dependency
    result = clirunner.invoke(
@@ -116,6 +120,7 @@ def test_global_packages(
    assert pkgs_to_names(LibraryPackageManager().get_installed()) == [
        "ArduinoJson",
        "AsyncMqttClient",
        "AsyncTCP",
        "Bounce2",
        "ESPAsyncTCP",
    ]
@@ -34,7 +34,7 @@ PROJECT_OUTDATED_CONFIG_TPL = """
platform = platformio/atmelavr@^2
framework = arduino
board = attiny88
-lib_deps = milesburton/DallasTemperature@^3.8.0
+lib_deps = milesburton/DallasTemperature@^3.9.1
"""

PROJECT_UPDATED_CONFIG_TPL = """
@@ -42,7 +42,7 @@ PROJECT_UPDATED_CONFIG_TPL = """
platform = platformio/atmelavr@<4
framework = arduino
board = attiny88
-lib_deps = milesburton/DallasTemperature@^3.8.0
+lib_deps = milesburton/DallasTemperature@^3.9.1
"""


@@ -179,7 +179,7 @@ def test_project(
        PackageSpec("toolchain-atmelavr@1.50400.190710"),
    ]
    assert config.get("env:devkit", "lib_deps") == [
-        "milesburton/DallasTemperature@^3.8.0"
+        "milesburton/DallasTemperature@^3.9.1"
    ]

    # update packages
@@ -195,10 +195,7 @@ def test_project(
    assert pkgs[0].metadata.name == "atmelavr"
    assert pkgs[0].metadata.version.major == 3
    assert pkgs_to_specs(lm.get_installed()) == [
-        PackageSpec(
-            "DallasTemperature@%s"
-            % get_pkg_latest_version("milesburton/DallasTemperature")
-        ),
+        PackageSpec("DallasTemperature@3.11.0"),
        PackageSpec(
            "OneWire@%s" % get_pkg_latest_version("paulstoffregen/OneWire")
        ),
@@ -210,7 +207,7 @@ def test_project(
        PackageSpec("toolchain-atmelavr@1.50400.190710"),
    ]
    assert config.get("env:devkit", "lib_deps") == [
-        "milesburton/DallasTemperature@^3.8.0"
+        "milesburton/DallasTemperature@^3.9.1"
    ]

    # update again
@@ -230,7 +227,7 @@ def test_custom_project_libraries(
    project_dir = tmp_path / "project"
    project_dir.mkdir()
    (project_dir / "platformio.ini").write_text(PROJECT_OUTDATED_CONFIG_TPL)
-    spec = "milesburton/DallasTemperature@^3.8.0"
+    spec = "milesburton/DallasTemperature@^3.9.1"
    result = clirunner.invoke(
        package_install_cmd,
        ["-d", str(project_dir), "-e", "devkit", "-l", spec],
@@ -251,15 +248,15 @@ def test_custom_project_libraries(
    # update package
    result = clirunner.invoke(
        package_update_cmd,
-        ["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.8.0"],
+        ["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.9.1"],
    )
    assert ProjectConfig().get("env:devkit", "lib_deps") == [
-        "milesburton/DallasTemperature@^3.8.0"
+        "milesburton/DallasTemperature@^3.9.1"
    ]
    # try again
    result = clirunner.invoke(
        package_update_cmd,
-        ["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.8.0"],
+        ["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.9.1"],
    )
    validate_cliresult(result)
    assert "Already up-to-date." in result.output
@@ -276,16 +273,13 @@ def test_custom_project_libraries(
        os.path.join(config.get("platformio", "libdeps_dir"), "devkit")
    )
    assert pkgs_to_specs(lm.get_installed()) == [
-        PackageSpec(
-            "DallasTemperature@%s"
-            % get_pkg_latest_version("milesburton/DallasTemperature")
-        ),
+        PackageSpec("DallasTemperature@3.11.0"),
        PackageSpec(
            "OneWire@%s" % get_pkg_latest_version("paulstoffregen/OneWire")
        ),
    ]
    assert config.get("env:devkit", "lib_deps") == [
-        "milesburton/DallasTemperature@^3.8.0"
+        "milesburton/DallasTemperature@^3.9.1"
    ]

    # unknown libraries
@@ -235,7 +235,7 @@ def test_global_lib_update_check(clirunner, validate_cliresult):
    validate_cliresult(result)
    output = json.loads(result.output)
    assert set(
-        ["Adafruit PN532", "AsyncMqttClient", "ESPAsyncTCP", "NeoPixelBus"]
+        ["Adafruit PN532", "AsyncMqttClient", "AsyncTCP", "ESPAsyncTCP", "NeoPixelBus"]
    ) == set(lib["name"] for lib in output)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform != "darwin", reason="runs only on macOS (issue with SimAVR)"
|
||||
)
|
||||
def test_custom_testing_command(clirunner, validate_cliresult, tmp_path: Path):
|
||||
project_dir = tmp_path / "project"
|
||||
project_dir.mkdir()
|
||||
(project_dir / "platformio.ini").write_text(
|
||||
"""
|
||||
[env:uno]
|
||||
platform = atmelavr
|
||||
framework = arduino
|
||||
board = uno
|
||||
# @pytest.mark.skipif(
|
||||
# sys.platform != "darwin", reason="runs only on macOS (issue with SimAVR)"
|
||||
# )
|
||||
# def test_custom_testing_command(clirunner, validate_cliresult, tmp_path: Path):
|
||||
# project_dir = tmp_path / "project"
|
||||
# project_dir.mkdir()
|
||||
# (project_dir / "platformio.ini").write_text(
|
||||
# """
|
||||
# [env:uno]
|
||||
# platform = atmelavr
|
||||
# framework = arduino
|
||||
# board = uno
|
||||
|
||||
platform_packages =
|
||||
platformio/tool-simavr @ ^1
|
||||
test_speed = 9600
|
||||
test_testing_command =
|
||||
${platformio.packages_dir}/tool-simavr/bin/simavr
|
||||
-m
|
||||
atmega328p
|
||||
-f
|
||||
16000000L
|
||||
${platformio.build_dir}/${this.__env__}/firmware.elf
|
||||
"""
|
||||
)
|
||||
test_dir = project_dir / "test" / "test_dummy"
|
||||
test_dir.mkdir(parents=True)
|
||||
(test_dir / "test_main.cpp").write_text(
|
||||
"""
|
||||
#include <Arduino.h>
|
||||
#include <unity.h>
|
||||
# platform_packages =
|
||||
# platformio/tool-simavr @ ^1
|
||||
# test_speed = 9600
|
||||
# test_testing_command =
|
||||
# ${platformio.packages_dir}/tool-simavr/bin/simavr
|
||||
# -m
|
||||
# atmega328p
|
||||
# -f
|
||||
# 16000000L
|
||||
# ${platformio.build_dir}/${this.__env__}/firmware.elf
|
||||
# """
|
||||
# )
|
||||
# test_dir = project_dir / "test" / "test_dummy"
|
||||
# test_dir.mkdir(parents=True)
|
||||
# (test_dir / "test_main.cpp").write_text(
|
||||
# """
|
||||
# #include <Arduino.h>
|
||||
# #include <unity.h>
|
||||
|
||||
void setUp(void) {
|
||||
// set stuff up here
|
||||
}
|
||||
# void setUp(void) {
|
||||
# // set stuff up here
|
||||
# }
|
||||
|
||||
void tearDown(void) {
|
||||
// clean stuff up here
|
||||
}
|
||||
# void tearDown(void) {
|
||||
# // clean stuff up here
|
||||
# }
|
||||
|
||||
void dummy_test(void) {
|
||||
TEST_ASSERT_EQUAL(1, 1);
|
||||
}
|
||||
# void dummy_test(void) {
|
||||
# TEST_ASSERT_EQUAL(1, 1);
|
||||
# }
|
||||
|
||||
void setup() {
|
||||
UNITY_BEGIN();
|
||||
RUN_TEST(dummy_test);
|
||||
UNITY_END();
|
||||
}
|
||||
# void setup() {
|
||||
# UNITY_BEGIN();
|
||||
# RUN_TEST(dummy_test);
|
||||
# UNITY_END();
|
||||
# }
|
||||
|
||||
void loop() {
|
||||
delay(1000);
|
||||
}
|
||||
"""
|
||||
)
|
||||
result = clirunner.invoke(
|
||||
pio_test_cmd,
|
||||
["-d", str(project_dir), "--without-uploading"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "dummy_test" in result.output
|
||||
# void loop() {
|
||||
# delay(1000);
|
||||
# }
|
||||
# """
|
||||
# )
|
||||
# result = clirunner.invoke(
|
||||
# pio_test_cmd,
|
||||
# ["-d", str(project_dir), "--without-uploading"],
|
||||
# )
|
||||
# validate_cliresult(result)
|
||||
# assert "dummy_test" in result.output
|
||||
|
||||
|
||||
def test_unity_setup_teardown(clirunner, validate_cliresult, tmpdir):
|
||||
|
@@ -335,7 +335,7 @@ def test_symlink(tmp_path: Path):
    # uninstall
    lm.uninstall("External")
    assert ["Installed"] == [pkg.metadata.name for pkg in lm.get_installed()]
-    # ensure original package was not rmeoved
+    # ensure original package was not removed
    assert external_pkg_dir.is_dir()

    # install again, remove from a disk
tox.ini (7 changed lines)

@@ -27,11 +27,12 @@ passenv = *
usedevelop = True
deps =
    black
+   codespell
    isort
+   jsondiff
    pylint
    pytest
    pytest-xdist
-   jsondiff
commands =
    {envpython} --version
    pio system info
@@ -54,8 +55,8 @@ commands =

[testenv:docs]
deps =
    sphinx
-   sphinx-rtd-theme==2.0.0
+   sphinx-rtd-theme==3.0.2
    sphinxcontrib-googleanalytics
    sphinx-notfound-page
    sphinx-copybutton
    restructuredtext-lint