Mirror of https://github.com/platformio/platformio-core.git (synced 2025-07-30 18:17:13 +02:00)

Merge branch 'release/v6.1.10'
.github/workflows/core.yml (vendored, 7 changes)
@@ -8,7 +8,7 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-20.04, windows-latest, macos-latest]
-        python-version: ["3.6", "3.9", "3.11"]
+        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]
 
     runs-on: ${{ matrix.os }}
 
@@ -27,12 +27,17 @@ jobs:
         python -m pip install --upgrade pip
         pip install tox
 
+    - name: Core System Info
+      run: |
+        tox -e py
+
     - name: Python Lint
       if: ${{ matrix.python-version != '3.6' }}
       run: |
        tox -e lint
 
     - name: Integration Tests
+      if: ${{ matrix.python-version == '3.9' }}
      run: |
        tox -e testcore
 
HISTORY.rst (13 changes)
@@ -13,7 +13,18 @@ Release Notes
 PlatformIO Core 6
 -----------------
 
-**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.**
+Unlock the true potential of embedded software development with
+PlatformIO's collaborative ecosystem, embracing declarative principles,
+test-driven methodologies, and modern toolchains for unrivaled success.
 
+6.1.10 (2023-08-11)
+~~~~~~~~~~~~~~~~~~~
+
+* Resolved an issue that caused generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to break when the ``-iprefix`` compiler flag was used
+* Resolved an issue encountered while utilizing the `pio pkg exec <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_exec.html>`__ command on the Windows platform to execute Python scripts from a package
+* Implemented a crucial improvement to the `pio run <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, guaranteeing that the ``monitor`` target is not executed if any of the preceding targets, such as ``upload``, encounter failures
+* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.11 with new checks, CLI commands and various analysis improvements
+* Resolved a critical issue that arose on macOS ARM platforms due to the Python "requests" module, leading to a "ModuleNotFoundError: No module named 'chardet'" (`issue #4702 <https://github.com/platformio/platformio-core/issues/4702>`_)
+
 6.1.9 (2023-07-06)
 ~~~~~~~~~~~~~~~~~~
@@ -36,9 +36,11 @@ PlatformIO Core
 .. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
   :target: https://platformio.org?utm_source=github&utm_medium=core
 
-`PlatformIO <https://platformio.org>`_ is a professional collaborative platform for embedded development.
+`PlatformIO <https://platformio.org>`_: Your Gateway to Embedded Software Development Excellence.
 
-**A place where Developers and Teams have true Freedom! No more vendor lock-in!**
+Unlock the true potential of embedded software development with
+PlatformIO's collaborative ecosystem, embracing declarative principles,
+test-driven methodologies, and modern toolchains for unrivaled success.
 
 * Open source, maximum permissive Apache 2.0 license
 * Cross-platform IDE and Unified Debugger
docs (2 changes)
Submodule docs updated: f8dbf012e4...295991a9c2

examples (2 changes)
Submodule examples updated: 4b572ec9fe...4bed26fd0d
@@ -12,20 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-VERSION = (6, 1, 9)
+VERSION = (6, 1, 10)
 __version__ = ".".join([str(s) for s in VERSION])
 
 __title__ = "platformio"
 __description__ = (
-    "A professional collaborative platform for embedded development. "
-    "Cross-platform IDE and Unified Debugger. "
-    "Static Code Analyzer and Remote Unit Testing. "
-    "Multi-platform and Multi-architecture Build System. "
-    "Firmware File Explorer and Memory Inspection. "
-    "IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
-    "STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
-    "MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
-    "STMicroelectronics (STM8/STM32), Teensy"
+    "Your Gateway to Embedded Software Development Excellence. "
+    "Unlock the true potential of embedded software development "
+    "with PlatformIO's collaborative ecosystem, embracing "
+    "declarative principles, test-driven methodologies, and "
+    "modern toolchains for unrivaled success."
 )
 __url__ = "https://platformio.org"
 
@@ -46,7 +42,7 @@ __core_packages__ = {
     "contrib-piohome": "~3.4.2",
     "contrib-pioremote": "~1.0.0",
     "tool-scons": "~4.40502.0",
-    "tool-cppcheck": "~1.270.0",
+    "tool-cppcheck": "~1.21100.0",
     "tool-clangtidy": "~1.150005.0",
     "tool-pvs-studio": "~7.18.0",
 }
@@ -56,3 +52,22 @@ __check_internet_hosts__ = [
     "88.198.170.159",  # platformio.org
     "github.com",
 ] + __registry_mirror_hosts__
+
+__install_requires__ = [
+    # Core requirements
+    "bottle == 0.12.*",
+    "click >=8.0.4, <=8.2",
+    "colorama",
+    "marshmallow == 3.*",
+    "pyelftools == 0.29",
+    "pyserial == 3.5.*",  # keep in sync "device/monitor/terminal.py"
+    "requests == 2.*",
+    "semantic_version == 2.10.*",
+    "tabulate == 0.*",
+] + [
+    # PIO Home requirements
+    "ajsonrpc == 1.2.*",
+    "starlette >=0.19, <0.32",
+    "uvicorn >=0.16, <0.24",
+    "wsproto == 1.*",
+]
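The runtime dependency pins now live next to the version metadata as __install_requires__, so any code that already imports the platformio package can reuse exactly the same pins. A minimal sketch of feeding that list to pip programmatically (the import is real; running it as a standalone snippet like this is only an assumption for illustration, not something this commit adds):

    # Sketch: reinstall/upgrade the pinned runtime dependencies with pip.
    # Assumes only that the "platformio" package is importable.
    import subprocess
    import sys

    from platformio import __install_requires__

    subprocess.run(
        [sys.executable, "-m", "pip", "install", "--upgrade", *__install_requires__],
        check=True,  # raise if pip fails
    )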
@@ -258,6 +258,10 @@ def get_cid():
     return cid
 
 
+def get_project_id(project_dir):
+    return hashlib.sha1(hashlib_encode_data(project_dir)).hexdigest()
+
+
 def get_user_agent():
     data = [
         "PlatformIO/%s" % __version__,
@@ -270,6 +274,8 @@ def get_user_agent():
         data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
     data.append("Python/%s" % platform.python_version())
     data.append("Platform/%s" % platform.platform())
+    if not get_setting("enable_telemetry"):
+        data.append("Telemetry/0")
     return " ".join(data)
 
 
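The new get_project_id() derives a stable, anonymous identifier from a project directory; the telemetry change further down swaps its inline hashlib code for this helper when building the "pid" parameter. A self-contained sketch of the same hashing (the small encode helper below is a stand-in for platformio.compat.hashlib_encode_data, inlined here only so the example runs on its own):

    import hashlib


    def hashlib_encode_data(data):
        # Stand-in for platformio.compat.hashlib_encode_data: hashlib needs bytes.
        if not isinstance(data, bytes):
            data = str(data).encode("utf-8")
        return data


    def get_project_id(project_dir):
        # Stable, non-reversible identifier for a project directory.
        return hashlib.sha1(hashlib_encode_data(project_dir)).hexdigest()


    print(get_project_id("/path/to/project"))  # example path -> 40-char hex digest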
@@ -38,7 +38,6 @@ AllowSubstExceptions(NameError)
 # append CLI arguments to build environment
 clivars = Variables(None)
 clivars.AddVariables(
-    ("PLATFORM_MANIFEST",),
     ("BUILD_SCRIPT",),
     ("PROJECT_CONFIG",),
     ("PIOENV",),
@@ -72,8 +71,7 @@ DEFAULT_ENV_OPTIONS = dict(
     variables=clivars,
     # Propagating External Environment
     ENV=os.environ,
-    TIMESTAMP=int(time()),
-    UNIX_TIME="$TIMESTAMP",  # deprecated
+    UNIX_TIME=int(time()),
     BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
     BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
     BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
@@ -12,11 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 import glob
 import os
 
-import click
 import SCons.Defaults  # pylint: disable=import-error
 import SCons.Subst  # pylint: disable=import-error
 from SCons.Script import COMMAND_LINE_TARGETS  # pylint: disable=import-error
@@ -140,9 +138,9 @@ def dump_svd_path(env):
     return None
 
 
-def _subst_cmd(env, cmd):
-    args = env.subst_list(cmd, SCons.Subst.SUBST_CMD)[0]
-    return " ".join([SCons.Subst.quote_spaces(arg) for arg in args])
+def _split_flags_string(env, s):
+    args = env.subst_list(s, SCons.Subst.SUBST_CMD)[0]
+    return [str(arg) for arg in args]
 
 
 def DumpIntegrationData(*args):
@@ -155,12 +153,8 @@ def DumpIntegrationData(*args):
         ],
         "defines": dump_defines(projenv),
         "includes": projenv.DumpIntegrationIncludes(),
-        "cc_flags": click.parser.split_arg_string(
-            _subst_cmd(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS")
-        ),
-        "cxx_flags": click.parser.split_arg_string(
-            _subst_cmd(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS")
-        ),
+        "cc_flags": _split_flags_string(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
+        "cxx_flags": _split_flags_string(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
         "cc_path": where_is_program(
             globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
         ),
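The integration data dump now reports cc_flags and cxx_flags as the argument list that SCons substituted (_split_flags_string), instead of joining the arguments into one string and re-splitting them. The sketch below only illustrates, with invented flag values and plain str.split standing in for the real splitter, why keeping the list sidesteps any quoting and re-splitting concerns:

    # A substituted argument list, as the new helper would return it
    args = ["-Os", "-iprefix", "/opt/my sdk/include", "-Wall"]

    # Join-then-split round trip (str.split is only a stand-in for the old splitter)
    naive = " ".join(args).split()
    print(naive)                   # ['-Os', '-iprefix', '/opt/my', 'sdk/include', '-Wall']

    # Keeping the substituted arguments as-is preserves the path with a space
    print([str(a) for a in args])  # ['-Os', '-iprefix', '/opt/my sdk/include', '-Wall']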
@@ -33,9 +33,7 @@ from platformio.project.config import ProjectOptions
 @util.memoized()
 def _PioPlatform():
     env = DefaultEnvironment()
-    p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"]))
-    p.configure_project_packages(env["PIOENV"], COMMAND_LINE_TARGETS)
-    return p
+    return PlatformFactory.from_env(env["PIOENV"], targets=COMMAND_LINE_TARGETS)
 
 
 def PioPlatform(_):
@@ -53,7 +53,7 @@ def _get_symbol_locations(env, elf_path, addrs):
     locations = [line for line in result["out"].split("\n") if line]
     assert len(addrs) == len(locations)
 
-    return dict(zip(addrs, [l.strip() for l in locations]))
+    return dict(zip(addrs, [loc.strip() for loc in locations]))
 
 
 def _get_demangled_names(env, mangled_names):
@@ -73,31 +73,7 @@ def _get_demangled_names(env, mangled_names):
     )
 
 
-def _determine_section(sections, symbol_addr):
-    for section, info in sections.items():
-        if not _is_flash_section(info) and not _is_ram_section(info):
-            continue
-        if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
-            return section
-    return "unknown"
-
-
-def _is_ram_section(section):
-    return (
-        section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
-        and section.get("flags", "") == "WA"
-    )
-
-
-def _is_flash_section(section):
-    return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
-
-
-def _is_valid_symbol(symbol_name, symbol_type, symbol_address):
-    return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
-
-
-def _collect_sections_info(elffile):
+def _collect_sections_info(env, elffile):
     sections = {}
     for section in elffile.iter_sections():
         if section.is_null() or section.name.startswith(".debug"):
@@ -107,13 +83,18 @@ def _collect_sections_info(elffile):
         section_flags = describe_sh_flags(section["sh_flags"])
         section_size = section.data_size
 
-        sections[section.name] = {
+        section_data = {
+            "name": section.name,
             "size": section_size,
             "start_addr": section["sh_addr"],
             "type": section_type,
             "flags": section_flags,
         }
 
+        sections[section.name] = section_data
+        sections[section.name]["in_flash"] = env.pioSizeIsFlashSection(section_data)
+        sections[section.name]["in_ram"] = env.pioSizeIsRamSection(section_data)
+
     return sections
 
 
@@ -136,7 +117,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
         symbol_size = s["st_size"]
         symbol_type = symbol_info["type"]
 
-        if not _is_valid_symbol(s.name, symbol_type, symbol_addr):
+        if not env.pioSizeIsValidSymbol(s.name, symbol_type, symbol_addr):
             continue
 
         symbol = {
@@ -145,7 +126,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
             "name": s.name,
             "type": symbol_type,
             "size": symbol_size,
-            "section": _determine_section(sections, symbol_addr),
+            "section": env.pioSizeDetermineSection(sections, symbol_addr),
         }
 
         if s.name.startswith("_Z"):
@@ -175,12 +156,36 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
     return symbols
 
 
-def _calculate_firmware_size(sections):
+def pioSizeDetermineSection(_, sections, symbol_addr):
+    for section, info in sections.items():
+        if not info.get("in_flash", False) and not info.get("in_ram", False):
+            continue
+        if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
+            return section
+    return "unknown"
+
+
+def pioSizeIsValidSymbol(_, symbol_name, symbol_type, symbol_address):
+    return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
+
+
+def pioSizeIsRamSection(_, section):
+    return (
+        section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
+        and section.get("flags", "") == "WA"
+    )
+
+
+def pioSizeIsFlashSection(_, section):
+    return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
+
+
+def pioSizeCalculateFirmwareSize(_, sections):
     flash_size = ram_size = 0
     for section_info in sections.values():
-        if _is_flash_section(section_info):
+        if section_info.get("in_flash", False):
             flash_size += section_info.get("size", 0)
-        if _is_ram_section(section_info):
+        if section_info.get("in_ram", False):
             ram_size += section_info.get("size", 0)
 
     return ram_size, flash_size
@@ -210,8 +215,8 @@ def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument
             sys.stderr.write("Elf file doesn't contain DWARF information")
             env.Exit(1)
 
-        sections = _collect_sections_info(elffile)
-        firmware_ram, firmware_flash = _calculate_firmware_size(sections)
+        sections = _collect_sections_info(env, elffile)
+        firmware_ram, firmware_flash = env.pioSizeCalculateFirmwareSize(sections)
         data["memory"]["total"] = {
             "ram_size": firmware_ram,
             "flash_size": firmware_flash,
@@ -226,9 +231,11 @@ def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument
 
             symbol_size = symbol.get("size", 0)
             section = sections.get(symbol.get("section", ""), {})
-            if _is_ram_section(section):
+            if not section:
+                continue
+            if section.get("in_ram", False):
                 files[file_path]["ram_size"] += symbol_size
-            if _is_flash_section(section):
+            if section.get("in_flash", False):
                 files[file_path]["flash_size"] += symbol_size
 
             files[file_path]["symbols"].append(symbol)
@@ -250,5 +257,10 @@ def exists(_):
 
 
 def generate(env):
+    env.AddMethod(pioSizeIsRamSection)
+    env.AddMethod(pioSizeIsFlashSection)
+    env.AddMethod(pioSizeCalculateFirmwareSize)
+    env.AddMethod(pioSizeDetermineSection)
+    env.AddMethod(pioSizeIsValidSymbol)
     env.AddMethod(DumpSizeData)
     return env
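The size-report helpers are now attached to the SCons construction environment with env.AddMethod(), so other build scripts can call (or override) them as env.pioSizeIsFlashSection() and friends. A minimal, standalone sketch of that SCons pattern (creating a bare Environment() here is only for demonstration, not how PlatformIO wires the tool in):

    from SCons.Environment import Environment


    def pioSizeIsFlashSection(_, section):
        # When bound via AddMethod(), the first argument receives the environment.
        return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")


    env = Environment()
    env.AddMethod(pioSizeIsFlashSection)  # now callable as env.pioSizeIsFlashSection(...)
    print(env.pioSizeIsFlashSection({"type": "SHT_PROGBITS", "flags": "AX"}))  # True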
@@ -18,7 +18,7 @@ import subprocess
 
 import click
 
-from platformio import VERSION, __version__, app, exception
+from platformio import VERSION, __install_requires__, __version__, app, exception
 from platformio.http import fetch_remote_content
 from platformio.package.manager.core import update_core_packages
 from platformio.proc import get_pythonexe_path
@@ -33,9 +33,14 @@ DEVELOP_INIT_SCRIPT_URL = (
 
 @click.command("upgrade", short_help="Upgrade PlatformIO Core to the latest version")
 @click.option("--dev", is_flag=True, help="Use development branch")
+@click.option("--only-dependencies", is_flag=True)
 @click.option("--verbose", "-v", is_flag=True)
-def cli(dev, verbose):
+def cli(dev, only_dependencies, verbose):
+    if only_dependencies:
+        return upgrade_pypi_dependencies(verbose)
+
     update_core_packages()
 
     if not dev and __version__ == get_latest_version():
         return click.secho(
             "You're up-to-date!\nPlatformIO %s is currently the "
@@ -50,11 +55,21 @@ def cli(dev, verbose):
     pkg_spec = DEVELOP_ZIP_URL if to_develop else "platformio"
 
     try:
+        # PIO Core
         subprocess.run(
             [python_exe, "-m", "pip", "install", "--upgrade", pkg_spec],
             check=True,
             stdout=subprocess.PIPE if not verbose else None,
         )
+
+        # PyPI dependencies
+        subprocess.run(
+            [python_exe, "-m", "platformio", "upgrade", "--only-dependencies"],
+            check=False,
+            stdout=subprocess.PIPE,
+        )
+
+        # Check version
         output = subprocess.run(
             [python_exe, "-m", "platformio", "--version"],
             check=True,
@@ -87,9 +102,20 @@ def cli(dev, verbose):
     return True
 
 
-def get_pkg_spec(to_develop):
-    if to_develop:
-        return
+def upgrade_pypi_dependencies(verbose):
+    subprocess.run(
+        [
+            get_pythonexe_path(),
+            "-m",
+            "pip",
+            "install",
+            "--upgrade",
+            "pip",
+            *__install_requires__,
+        ],
+        check=True,
+        stdout=subprocess.PIPE if not verbose else None,
+    )
 
 
 def get_latest_version():
@@ -109,9 +109,7 @@ def cli(
 
 
 def _configure(ctx, project_config, env_name, load_mode, verbose, __unprocessed):
-    platform = PlatformFactory.new(
-        project_config.get(f"env:{env_name}", "platform"), autoinstall=True
-    )
+    platform = PlatformFactory.from_env(env_name, autoinstall=True)
     debug_config = DebugConfigFactory.new(
         platform,
         project_config,
@@ -25,6 +25,7 @@ from platformio.home.rpc.handlers.app import AppRPC
 from platformio.home.rpc.handlers.base import BaseRPCHandler
 from platformio.home.rpc.handlers.piocore import PIOCoreRPC
 from platformio.package.manager.platform import PlatformPackageManager
+from platformio.platform.factory import PlatformFactory
 from platformio.project.config import ProjectConfig
 from platformio.project.exception import ProjectError
 from platformio.project.helpers import get_project_dir, is_platformio_project
@@ -37,8 +38,12 @@ class ProjectRPC(BaseRPCHandler):
     def config_call(init_kwargs, method, *args):
         assert isinstance(init_kwargs, dict)
         assert "path" in init_kwargs
-        project_dir = get_project_dir()
-        if os.path.isfile(init_kwargs["path"]):
+        if os.path.isdir(init_kwargs["path"]):
+            project_dir = init_kwargs["path"]
+            init_kwargs["path"] = os.path.join(init_kwargs["path"], "platformio.ini")
+        elif os.path.isfile(init_kwargs["path"]):
+            project_dir = get_project_dir()
+        else:
             project_dir = os.path.dirname(init_kwargs["path"])
         with fs.cd(project_dir):
             return getattr(ProjectConfig(**init_kwargs), method)(*args)
@@ -335,3 +340,47 @@ class ProjectRPC(BaseRPCHandler):
                 encoding="utf-8",
             )
         return []
+
+    @staticmethod
+    def configuration(project_dir, env):
+        assert is_platformio_project(project_dir)
+        with fs.cd(project_dir):
+            config = ProjectConfig(os.path.join(project_dir, "platformio.ini"))
+            platform = PlatformFactory.from_env(env, autoinstall=True)
+            platform_pkg = PlatformPackageManager().get_package(platform.get_dir())
+            board_id = config.get(f"env:{env}", "board", None)
+
+            # frameworks
+            frameworks = []
+            for name in config.get(f"env:{env}", "framework", []):
+                if name not in platform.frameworks:
+                    continue
+                f_pkg_name = platform.frameworks[name].get("package")
+                if not f_pkg_name:
+                    continue
+                f_pkg = platform.get_package(f_pkg_name)
+                if not f_pkg:
+                    continue
+                f_manifest = platform.pm.load_manifest(f_pkg)
+                frameworks.append(
+                    dict(
+                        name=name,
+                        title=f_manifest.get("title"),
+                        version=str(f_pkg.metadata.version),
+                    )
+                )
+
+            return dict(
+                platform=dict(
+                    ownername=platform_pkg.metadata.spec.owner
+                    if platform_pkg.metadata.spec
+                    else None,
+                    name=platform.name,
+                    title=platform.title,
+                    version=str(platform_pkg.metadata.version),
+                ),
+                board=platform.board_config(board_id).get_brief_data()
+                if board_id
+                else None,
+                frameworks=frameworks or None,
+            )
@@ -20,7 +20,7 @@ import click
 from platformio.compat import IS_MACOS, IS_WINDOWS
 from platformio.exception import ReturnErrorCode, UserSideException
 from platformio.package.manager.tool import ToolPackageManager
-from platformio.proc import get_pythonexe_path
+from platformio.proc import get_pythonexe_path, where_is_program
 
 
 @click.command("exec", short_help="Run command from package tool")
@@ -52,9 +52,13 @@ def package_exec_cmd(obj, package, call, args):
 
     inject_pkg_to_environ(pkg)
     os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path()
+
     # inject current python interpreter on Windows
-    if IS_WINDOWS and args and args[0].endswith(".py"):
+    if args[0].endswith(".py"):
         args = [os.environ["PIO_PYTHON_EXE"]] + list(args)
+        if not os.path.exists(args[1]):
+            args[1] = where_is_program(args[1])
+
     result = None
     try:
         run_options = dict(shell=call is not None, env=os.environ)
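pio pkg exec now prepends the current Python interpreter for any first argument ending in .py and, when the script path does not exist as given, resolves the bare name with where_is_program(), which (after the proc.py change below) scans the injected package PATH first. A simplified sketch of that lookup idea (no .exe handling, and the script name is only an example):

    import os


    def find_in_path(program, path=None):
        # Simplified PATH scan in the spirit of where_is_program()
        for bin_dir in (path or os.environ.get("PATH", "")).split(os.pathsep):
            candidate = os.path.join(bin_dir, program)
            if os.path.isfile(candidate):
                return candidate
        return program  # fall back to the name as given


    print(find_in_path("esptool.py"))  # resolved path if found on PATH, else the bare name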
@@ -206,7 +206,7 @@ def _install_project_env_libraries(project_env, options):
     config = ProjectConfig.get_instance()
 
     compatibility_qualifiers = {}
-    if config.get(f"env:{project_env}", "platform"):
+    if config.get(f"env:{project_env}", "platform", None):
         try:
             p = PlatformFactory.new(config.get(f"env:{project_env}", "platform"))
             compatibility_qualifiers["platforms"] = [p.name]
@@ -22,6 +22,7 @@ from platformio.package.manager.library import LibraryPackageManager
 from platformio.package.manager.platform import PlatformPackageManager
 from platformio.package.manager.tool import ToolPackageManager
 from platformio.package.meta import PackageItem, PackageSpec
+from platformio.platform.exception import UnknownPlatform
 from platformio.platform.factory import PlatformFactory
 from platformio.project.config import ProjectConfig
 
@@ -187,20 +188,20 @@ def list_project_packages(options):
 
 
 def print_project_env_platform_packages(project_env, options):
-    config = ProjectConfig.get_instance()
-    platform = config.get(f"env:{project_env}", "platform")
-    if not platform:
-        return None
-    pkg = PlatformPackageManager().get_package(platform)
-    if not pkg:
+    try:
+        p = PlatformFactory.from_env(project_env)
+    except UnknownPlatform:
         return None
     click.echo(
         "Platform %s"
-        % (humanize_package(pkg, platform, verbose=options.get("verbose")))
+        % (
+            humanize_package(
+                PlatformPackageManager().get_package(p.get_dir()),
+                p.config.get(f"env:{project_env}", "platform"),
+                verbose=options.get("verbose"),
+            )
+        )
     )
-    p = PlatformFactory.new(pkg)
-    if project_env:
-        p.configure_project_packages(project_env)
     print_dependency_tree(
         p.pm,
         specs=[p.get_package_spec(name) for name in p.packages],
@@ -62,10 +62,9 @@ class OutdatedCandidate:
 )
 @click.option("-e", "--environment", "environments", multiple=True)
 def package_outdated_cmd(project_dir, environments):
-    candidates = fetch_outdated_candidates(
-        project_dir, environments, with_progress=True
-    )
-    print_outdated_candidates(candidates)
+    with fs.cd(project_dir):
+        candidates = fetch_outdated_candidates(environments, with_progress=True)
+        print_outdated_candidates(candidates)
 
 
 def print_outdated_candidates(candidates):
@@ -126,8 +125,10 @@ def get_candidate_update_color(outdated):
     return None
 
 
-def fetch_outdated_candidates(project_dir, environments, with_progress=False):
+def fetch_outdated_candidates(environments, with_progress=False):
     candidates = []
+    config = ProjectConfig.get_instance()
+    config.validate(environments)
 
     def _add_candidate(data):
         new_candidate = OutdatedCandidate(
@@ -139,20 +140,16 @@ def fetch_outdated_candidates(environments, with_progress=False):
             return
         candidates.append(new_candidate)
 
-    with fs.cd(project_dir):
-        config = ProjectConfig.get_instance()
-        config.validate(environments)
-
-        # platforms
-        for item in find_platform_candidates(config, environments):
-            _add_candidate(item)
-            # platform package dependencies
-            for dep_item in find_platform_dependency_candidates(item):
-                _add_candidate(dep_item)
-
-        # libraries
-        for item in find_library_candidates(config, environments):
-            _add_candidate(item)
+    # platforms
+    for item in find_platform_candidates(config, environments):
+        _add_candidate(item)
+        # platform package dependencies
+        for dep_item in find_platform_dependency_candidates(item["env"]):
+            _add_candidate(dep_item)
+
+    # libraries
+    for item in find_library_candidates(config, environments):
+        _add_candidate(item)
 
     result = []
     if not with_progress:
@@ -172,7 +169,7 @@ def find_platform_candidates(config, environments):
     result = []
     pm = PlatformPackageManager()
     for env in config.envs():
-        platform = config.get(f"env:{env}", "platform")
+        platform = config.get(f"env:{env}", "platform", None)
         if not platform or (environments and env not in environments):
             continue
         spec = PackageSpec(platform)
@@ -183,14 +180,13 @@ def find_platform_candidates(config, environments):
     return result
 
 
-def find_platform_dependency_candidates(platform_candidate):
+def find_platform_dependency_candidates(env):
     result = []
-    p = PlatformFactory.new(platform_candidate["spec"])
-    p.configure_project_packages(platform_candidate["env"])
+    p = PlatformFactory.from_env(env)
    for pkg in p.get_installed_packages():
         result.append(
             dict(
-                env=platform_candidate["env"],
+                env=env,
                 pm=p.pm,
                 pkg=pkg,
                 spec=p.get_package_spec(pkg.metadata.name),
@@ -35,7 +35,7 @@ from platformio.package.manager._update import PackageManagerUpdateMixin
 from platformio.package.manifest.parser import ManifestParserFactory
 from platformio.package.meta import (
     PackageItem,
-    PackageMetaData,
+    PackageMetadata,
     PackageSpec,
     PackageType,
 )
@@ -199,7 +199,7 @@ class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-in
 
     def build_metadata(self, pkg_dir, spec, vcs_revision=None):
         manifest = self.load_manifest(pkg_dir)
-        metadata = PackageMetaData(
+        metadata = PackageMetadata(
             type=self.pkg_type,
             name=manifest.get("name"),
             version=manifest.get("version"),
@@ -401,7 +401,7 @@ class PackageSpec: # pylint: disable=too-many-instance-attributes
         return name
 
 
-class PackageMetaData:
+class PackageMetadata:
     def __init__(  # pylint: disable=redefined-builtin
         self, type, name, version, spec=None
     ):
@@ -416,7 +416,7 @@ class PackageMetaData:
 
     def __repr__(self):
         return (
-            "PackageMetaData <type={type} name={name} version={version} "
+            "PackageMetadata <type={type} name={name} version={version} "
            "spec={spec}".format(**self.as_dict())
        )
 
@@ -466,7 +466,7 @@ class PackageMetaData:
             data["spec"]["uri"] = data["spec"]["url"]
             del data["spec"]["url"]
         data["spec"] = PackageSpec(**data["spec"])
-        return PackageMetaData(**data)
+        return PackageMetadata(**data)
 
 
 class PackageItem:
@@ -515,7 +515,7 @@ class PackageItem:
         for location in self.get_metafile_locations():
             manifest_path = os.path.join(location, self.METAFILE_NAME)
             if os.path.isfile(manifest_path):
-                return PackageMetaData.load(manifest_path)
+                return PackageMetadata.load(manifest_path)
         return None
 
     def dump_meta(self):
@@ -51,13 +51,10 @@ class PlatformRunMixin:
         assert isinstance(targets, list)
 
         self.ensure_engine_compatible()
-        self.configure_project_packages(variables["pioenv"], targets)
 
         self.silent = silent
         self.verbose = verbose or app.get_setting("force_verbose")
 
-        variables["platform_manifest"] = self.manifest_path
-
         if "build_script" not in variables:
             variables["build_script"] = self.get_build_script()
         if not os.path.isfile(variables["build_script"]):
@@ -34,6 +34,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
 
     def __init__(self, manifest_path):
         self.manifest_path = manifest_path
+        self.project_env = None  # set by factory.from_env(env)
         self.silent = False
         self.verbose = False
 
@@ -21,6 +21,8 @@ from platformio.compat import load_python_module
 from platformio.package.meta import PackageItem
 from platformio.platform import base
 from platformio.platform.exception import UnknownPlatform
+from platformio.project.config import ProjectConfig
+from platformio.project.exception import UndefinedEnvPlatformError
 
 
 class PlatformFactory:
@@ -88,3 +90,14 @@ class PlatformFactory:
         _instance = platform_cls(os.path.join(platform_dir, "platform.json"))
         assert isinstance(_instance, base.PlatformBase)
         return _instance
+
+    @classmethod
+    def from_env(cls, env, targets=None, autoinstall=False):
+        config = ProjectConfig.get_instance()
+        spec = config.get(f"env:{env}", "platform", None)
+        if not spec:
+            raise UndefinedEnvPlatformError(env)
+        p = cls.new(spec, autoinstall=autoinstall)
+        p.project_env = env
+        p.configure_project_packages(env, targets)
+        return p
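PlatformFactory.from_env() is the single entry point the rest of this commit migrates to (build scripts, pio run, pio debug, pio test, pkg list/outdated, sample-code generation): it reads the environment's platform option from platformio.ini, instantiates the platform, records the environment name, and configures the project packages in one call. A sketch of the call-site change; "uno" is a hypothetical environment name, and the snippet has to run inside a PlatformIO project so ProjectConfig can resolve it:

    from platformio.platform.factory import PlatformFactory

    # Before (simplified): resolve the spec manually, then configure packages separately.
    # config = ProjectConfig.get_instance()
    # p = PlatformFactory.new(config.get("env:uno", "platform"), autoinstall=True)
    # p.configure_project_packages("uno")

    # After: one call per environment.
    p = PlatformFactory.from_env("uno", autoinstall=True)
    print(p.name, p.project_env)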
@@ -185,10 +185,17 @@ def copy_pythonpath_to_osenv():
 
 
 def where_is_program(program, envpath=None):
-    env = os.environ
+    env = os.environ.copy()
     if envpath:
         env["PATH"] = envpath
 
+    # look up in $PATH
+    for bin_dir in env.get("PATH", "").split(os.pathsep):
+        if os.path.isfile(os.path.join(bin_dir, program)):
+            return os.path.join(bin_dir, program)
+        if IS_WINDOWS and os.path.isfile(os.path.join(bin_dir, "%s.exe" % program)):
+            return os.path.join(bin_dir, "%s.exe" % program)
+
     # try OS's built-in commands
     try:
         result = exec_command(["where" if IS_WINDOWS else "which", program], env=env)
@@ -197,13 +204,6 @@ def where_is_program(program, envpath=None):
     except OSError:
         pass
 
-    # look up in $PATH
-    for bin_dir in env.get("PATH", "").split(os.pathsep):
-        if os.path.isfile(os.path.join(bin_dir, program)):
-            return os.path.join(bin_dir, program)
-        if os.path.isfile(os.path.join(bin_dir, "%s.exe" % program)):
-            return os.path.join(bin_dir, "%s.exe" % program)
-
     return program
 
 
@@ -26,6 +26,7 @@ from platformio.package.manager.platform import PlatformPackageManager
 from platformio.platform.exception import UnknownBoard
 from platformio.platform.factory import PlatformFactory
 from platformio.project.config import ProjectConfig
+from platformio.project.exception import UndefinedEnvPlatformError
 from platformio.project.helpers import is_platformio_project
 from platformio.project.integration.generator import ProjectGenerator
 from platformio.project.options import ProjectOptions
@@ -366,13 +367,10 @@ def update_project_env(environment, extra_project_options=None):
 
 
 def init_sample_code(config, environment):
-    platform_spec = config.get(f"env:{environment}", "platform", None)
-    if not platform_spec:
-        return None
-    p = PlatformFactory.new(platform_spec)
     try:
+        p = PlatformFactory.from_env(environment)
         return p.generate_sample_code(config, environment)
-    except NotImplementedError:
+    except (NotImplementedError, UndefinedEnvPlatformError):
         pass
 
     framework = config.get(f"env:{environment}", "framework", None)
@@ -1,3 +1 @@
 .pio
-CMakeListsPrivate.txt
-cmake-build-*/
@@ -1,33 +0,0 @@
-# !!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE
-# https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
-#
-# If you need to override existing CMake configuration or add extra,
-# please create `CMakeListsUser.txt` in the root of project.
-# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.
-
-cmake_minimum_required(VERSION 3.13)
-set(CMAKE_SYSTEM_NAME Generic)
-set(CMAKE_C_COMPILER_WORKS 1)
-set(CMAKE_CXX_COMPILER_WORKS 1)
-
-project("{{project_name}}" C CXX)
-
-include(CMakeListsPrivate.txt)
-
-if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/CMakeListsUser.txt)
-    include(CMakeListsUser.txt)
-endif()
-
-add_custom_target(
-    Production ALL
-    COMMAND platformio -c clion run "$<$<NOT:$<CONFIG:All>>:-e${CMAKE_BUILD_TYPE}>"
-    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
-)
-
-add_custom_target(
-    Debug ALL
-    COMMAND platformio -c clion debug "$<$<NOT:$<CONFIG:All>>:-e${CMAKE_BUILD_TYPE}>"
-    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
-)
-
-add_executable(Z_DUMMY_TARGET ${SRC_LIST})
@@ -1,127 +0,0 @@
-# !!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE
-# https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
-#
-# If you need to override existing CMake configuration or add extra,
-# please create `CMakeListsUser.txt` in the root of project.
-# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.
-
-% import os
-% import re
-%
-% from platformio.compat import shlex_join
-% from platformio.project.helpers import load_build_metadata
-%
-% def _normalize_path(path):
-%   if project_dir in path:
-%     path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
-%   elif user_home_dir in path:
-%     if "windows" in systype:
-%       path = path.replace(user_home_dir, "${ENV_HOME_PATH}")
-%     else:
-%       path = path.replace(user_home_dir, "$ENV{HOME}")
-%     end
-%   end
-%   return path
-% end
-%
-% def _fix_lib_dirs(lib_dirs):
-%   result = []
-%   for lib_dir in lib_dirs:
-%     if not os.path.isabs(lib_dir):
-%       lib_dir = os.path.join(project_dir, lib_dir)
-%     end
-%     result.append(to_unix_path(os.path.normpath(lib_dir)))
-%   end
-%   return result
-% end
-%
-% def _escape(text):
-%   return to_unix_path(text).replace('"', '\\"')
-% end
-%
-% def _get_lib_dirs(envname):
-%   env_libdeps_dir = os.path.join(config.get("platformio", "libdeps_dir"), envname)
-%   env_lib_extra_dirs = config.get("env:" + envname, "lib_extra_dirs", [])
-%   return _fix_lib_dirs([env_libdeps_dir] + env_lib_extra_dirs)
-% end
-%
-% envs = config.envs()
-
-
-% if len(envs) > 1:
-set(CMAKE_CONFIGURATION_TYPES "{{ ";".join(envs) }};" CACHE STRING "Build Types reflect PlatformIO Environments" FORCE)
-% else:
-set(CMAKE_CONFIGURATION_TYPES "{{ env_name }}" CACHE STRING "Build Types reflect PlatformIO Environments" FORCE)
-% end
-
-# Convert "Home Directory" that may contain unescaped backslashes on Windows
-% if "windows" in systype:
-file(TO_CMAKE_PATH $ENV{HOMEDRIVE}$ENV{HOMEPATH} ENV_HOME_PATH)
-% end
-
-% if svd_path:
-set(CLION_SVD_FILE_PATH "{{ _normalize_path(svd_path) }}" CACHE FILEPATH "Peripheral Registers Definitions File" FORCE)
-% end
-
-SET(CMAKE_C_COMPILER "{{ _normalize_path(cc_path) }}")
-SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
-SET(CMAKE_CXX_FLAGS {{ _normalize_path(to_unix_path(shlex_join(cxx_flags))) }})
-SET(CMAKE_C_FLAGS {{ _normalize_path(to_unix_path(shlex_join(cc_flags))) }})
-
-% cc_stds = [arg for arg in cc_flags if arg.startswith("-std=")]
-% cxx_stds = [arg for arg in cxx_flags if arg.startswith("-std=")]
-% if cc_stds:
-SET(CMAKE_C_STANDARD {{ cc_stds[-1][-2:] }})
-% end
-% if cxx_stds:
-set(CMAKE_CXX_STANDARD {{ cxx_stds[-1][-2:] }})
-% end
-
-if (CMAKE_BUILD_TYPE MATCHES "{{ env_name }}")
-% for define in defines:
-    add_definitions(-D{{!re.sub(r"([\"\(\)\ #])", r"\\\1", define)}})
-% end
-
-% for include in filter_includes(includes):
-    include_directories("{{ _normalize_path(include) }}")
-% end
-
-    FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
-% for dir in _get_lib_dirs(env_name):
-        {{ _normalize_path(dir) + "/*.*" }}
-% end
-    )
-endif()
-
-% leftover_envs = list(set(envs) ^ set([env_name]))
-%
-% ide_data = {}
-% if leftover_envs:
-%   ide_data = load_build_metadata(project_dir, leftover_envs)
-% end
-%
-% for env, data in ide_data.items():
-if (CMAKE_BUILD_TYPE MATCHES "{{ env }}")
-% for define in data["defines"]:
-    add_definitions(-D{{!re.sub(r"([\"\(\)\ #])", r"\\\1", define)}})
-% end
-
-% for include in filter_includes(data["includes"]):
-    include_directories("{{ _normalize_path(to_unix_path(include)) }}")
-% end
-
-    FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
-% for dir in _get_lib_dirs(env):
-        {{ _normalize_path(dir) + "/*.*" }}
-% end
-    )
-endif()
-% end
-
-FILE(GLOB_RECURSE SRC_LIST
-% for path in (project_src_dir, project_lib_dir, project_test_dir):
-    {{ _normalize_path(path) + "/*.*" }}
-% end
-)
-
-list(APPEND SRC_LIST ${EXTRA_LIB_SOURCES})
@@ -207,7 +207,7 @@ def process_env(
         verbose,
     ).process()
 
-    if "monitor" in targets and "nobuild" not in targets:
+    if result["succeeded"] and "monitor" in targets and "nobuild" not in targets:
         ctx.invoke(
             device_monitor_cmd,
             port=monitor_port,
@@ -72,8 +72,8 @@ class EnvironmentProcessor:
 
         # pre-clean
         if is_clean:
-            result = PlatformFactory.new(
-                self.options["platform"], autoinstall=True
+            result = PlatformFactory.from_env(
+                self.name, targets=self.targets, autoinstall=True
             ).run(build_vars, self.targets, self.silent, self.verbose, self.jobs)
             if not build_targets:
                 return result["returncode"] == 0
@@ -85,7 +85,7 @@ class EnvironmentProcessor:
                 "piotest_running_name": build_vars.get("piotest_running_name"),
            },
        )
-        result = PlatformFactory.new(self.options["platform"], autoinstall=True).run(
-            build_vars, build_targets, self.silent, self.verbose, self.jobs
-        )
+        result = PlatformFactory.from_env(
+            self.name, targets=build_targets, autoinstall=True
+        ).run(build_vars, build_targets, self.silent, self.verbose, self.jobs)
         return result["returncode"] == 0
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import atexit
-import hashlib
 import os
 import queue
 import re
@@ -27,9 +26,8 @@ import requests
 
 from platformio import __title__, __version__, app, exception, fs, util
 from platformio.cli import PlatformioCLI
-from platformio.compat import hashlib_encode_data
 from platformio.debug.config.base import DebugConfigBase
-from platformio.http import HTTPSession, ensure_internet_on
+from platformio.http import HTTPSession
 from platformio.proc import is_ci
 
 KEEP_MAX_REPORTS = 100
@@ -135,7 +133,7 @@ class TelemetryLogger:
         # print("_commit_payload", payload)
         try:
             r = self._http_session.post(
-                "https://telemetry.platformio.org/collect",
+                "https://collector.platformio.org/collect",
                 json=payload,
                 timeout=(2, 5),  # connect, read
             )
@@ -220,7 +218,7 @@ def dump_project_env_params(config, env, platform):
         for option in non_sensitive_data
         if config.has_option(section, option)
     }
-    params["pid"] = hashlib.sha1(hashlib_encode_data(config.path)).hexdigest()
+    params["pid"] = app.get_project_id(os.path.dirname(config.path))
     params["platform_name"] = platform.name
     params["platform_version"] = platform.version
     return params
@@ -365,8 +363,6 @@ def postpone_events(events):
 
 
 def process_postponed_logs():
-    if not ensure_internet_on():
-        return None
     events = load_postponed_events()
     if not events:
         return None
@@ -380,4 +376,5 @@ def process_postponed_logs():
             timestamp=event["timestamp"],
             instant_sending=False,
         )
+    telemetry.send()
     return True
@@ -62,8 +62,8 @@ class TestRunnerBase:
         self.test_suite = test_suite
         self.options = options
         self.project_config = project_config
-        self.platform = PlatformFactory.new(
-            self.project_config.get(f"env:{self.test_suite.env_name}", "platform"),
+        self.platform = PlatformFactory.from_env(
+            self.test_suite.env_name,
             autoinstall=True,
         )
         self.cmd_ctx = None
@@ -51,6 +51,12 @@ STATIC_FRAMEWORK_DATA = {
             "or physical experiences."
         ),
     },
+    "cmsis": {
+        "title": "CMSIS",
+        "description": (
+            "Vendor-independent hardware abstraction layer for the Cortex-M processor series"
+        ),
+    },
     "freertos": {
         "title": "FreeRTOS",
         "description": (
@@ -1,105 +0,0 @@
-# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import tempfile
-import io
-import sys
-import subprocess
-
-MAIN_SCRIPT_URL = "https://raw.githubusercontent.com/platformio/platformio-core-installer/master/get-platformio.py"
-
-
-def download_with_requests(url, dst):
-    import requests
-
-    resp = requests.get(url, stream=True)
-    itercontent = resp.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE)
-    with open(dst, "wb") as fp:
-        for chunk in itercontent:
-            fp.write(chunk)
-    return dst
-
-
-def download_with_urllib3(url, dst):
-    import urllib3
-
-    http = urllib3.PoolManager()
-    r = http.request("GET", url, preload_content=False)
-
-    with open(dst, "wb") as out:
-        while True:
-            data = r.read(io.DEFAULT_BUFFER_SIZE)
-            if not data:
-                break
-            out.write(data)
-
-    r.release_conn()
-    return dst
-
-
-def download_with_urllib(url, dst):
-    if sys.version_info[0] == 3:
-        from urllib.request import urlopen
-    else:
-        from urllib import urlopen
-
-    response = urlopen(url)
-    CHUNK = 16 * 1024
-    with open(dst, "wb") as f:
-        while True:
-            chunk = response.read(CHUNK)
-            if not chunk:
-                break
-            f.write(chunk)
-
-    return dst
-
-
-def download_with_curl(url, dst):
-    subprocess.check_output(["curl", "-o", dst, url])
-    return dst
-
-
-def download_with_wget(url, dst):
-    subprocess.check_output(["wget", "-O", dst, url])
-    return dst
-
-
-def download_file(url, dst):
-    methods = [
-        download_with_requests,
-        download_with_urllib3,
-        download_with_urllib,
-        download_with_curl,
-        download_with_wget,
-    ]
-    for method in methods:
-        try:
-            method(url, dst)
-            return dst
-        except:
-            pass
-    raise Exception("Could not download file '%s' to '%s' " % (url, dst))
-
-
-def main():
-    with tempfile.NamedTemporaryFile() as tmp_file:
-        dst = download_file(MAIN_SCRIPT_URL, str(tmp_file.name))
-        command = [sys.executable, dst]
-        command.extend(sys.argv[1:])
-        subprocess.check_call(command)
-
-
-if __name__ == "__main__":
-    sys.exit(main())
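Note (illustration only, not part of the commit): the removed bootstrap helper above chains several downloaders and hides every failure behind a bare ``except:``, which also swallows ``KeyboardInterrupt``. A self-contained sketch of the same fallback pattern with narrower error handling and the original cause preserved:

    import subprocess
    from urllib.request import urlopen


    def download_with_urllib(url, dst):
        # stdlib-only fallback, same behavior as the removed helper
        with urlopen(url) as response, open(dst, "wb") as f:
            while True:
                chunk = response.read(16 * 1024)
                if not chunk:
                    break
                f.write(chunk)
        return dst


    def download_with_curl(url, dst):
        subprocess.check_output(["curl", "-o", dst, url])
        return dst


    def download_file(url, dst):
        last_error = None
        for method in (download_with_urllib, download_with_curl):
            try:
                return method(url, dst)
            except Exception as exc:  # narrower than a bare "except:"
                last_error = exc
        raise RuntimeError("Could not download '%s' to '%s'" % (url, dst)) from last_error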
47 setup.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import platform
 from setuptools import find_packages, setup
 
 from platformio import (
@@ -22,48 +23,12 @@ from platformio import (
     __title__,
     __url__,
     __version__,
+    __install_requires__,
 )
 
-env_marker_below_37 = "python_version < '3.7'"
-env_marker_gte_37 = "python_version >= '3.7'"
-
-minimal_requirements = [
-    "bottle==0.12.*",
-    "click==8.0.4; " + env_marker_below_37,
-    "click==8.1.*; " + env_marker_gte_37,
-    "colorama",
-    "marshmallow==3.14.1; " + env_marker_below_37,
-    "marshmallow==3.19.*; " + env_marker_gte_37,
-    "pyelftools==0.29",
-    "pyserial==3.5.*",  # keep in sync "device/monitor/terminal.py"
-    "requests==2.*",
-    "semantic_version==2.10.*",
-    "tabulate==0.*",
-]
-
-home_requirements = [
-    "aiofiles>=0.8.0",
-    "ajsonrpc==1.2.*",
-    "starlette==0.19.1; " + env_marker_below_37,
-    "starlette==0.28.*; " + env_marker_gte_37,
-    "uvicorn==0.16.0; " + env_marker_below_37,
-    "uvicorn==0.22.*; " + env_marker_gte_37,
-    "wsproto==1.0.0; " + env_marker_below_37,
-    "wsproto==1.2.*; " + env_marker_gte_37,
-]
-
-# issue 4614: urllib3 v2.0 only supports OpenSSL 1.1.1+
-try:
-    import ssl
-
-    if ssl.OPENSSL_VERSION.startswith("OpenSSL ") and ssl.OPENSSL_VERSION_INFO < (
-        1,
-        1,
-        1,
-    ):
-        minimal_requirements.append("urllib3<2")
-except ImportError:
-    pass
+# issue #4702; Broken "requests/charset_normalizer" on macOS ARM
+if platform.system() == "Darwin" and "arm" in platform.machine().lower():
+    __install_requires__.append("chardet>=3.0.2,<4")
 
 
 setup(
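Note: the dependency pins are no longer assembled inside setup.py; they are imported from the ``platformio`` package itself. The relocated list is not shown in this diff, but as a hedged sketch it presumably lives in ``platformio/__init__.py`` (alongside ``__version__``) as a plain list with PEP 508 environment markers, roughly in this shape:

    # hypothetical shape of the relocated list; entries taken from the removed setup.py blocks
    __install_requires__ = [
        "bottle==0.12.*",
        "click==8.1.*; python_version >= '3.7'",
        "click==8.0.4; python_version < '3.7'",
        "requests==2.*",
        # ... remaining "minimal" and "home" requirements
    ]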
@@ -75,7 +40,7 @@ setup(
     author_email=__email__,
     url=__url__,
     license=__license__,
-    install_requires=minimal_requirements + home_requirements,
+    install_requires=__install_requires__,
     python_requires=">=3.6",
     packages=find_packages(include=["platformio", "platformio.*"]),
     package_data={
@@ -69,7 +69,7 @@ PVS_STUDIO_FREE_LICENSE_HEADER = """
 
 EXPECTED_ERRORS = 5
 EXPECTED_WARNINGS = 1
-EXPECTED_STYLE = 2
+EXPECTED_STYLE = 4
 EXPECTED_DEFECTS = EXPECTED_ERRORS + EXPECTED_WARNINGS + EXPECTED_STYLE
 
 
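Note: with the bumped expectation, EXPECTED_DEFECTS now evaluates to 5 + 1 + 4 = 10; presumably the updated check tool reports two additional style-level findings for the same fixture sources.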
@@ -345,7 +345,10 @@ def test_check_individual_flags_passed(clirunner, validate_cliresult, tmpdir):
     assert pvs_flags_found
 
 
-def test_check_cppcheck_misra_addon(clirunner, validate_cliresult, check_dir):
+def test_check_cppcheck_misra_addon(clirunner, validate_cliresult, tmpdir_factory):
+    check_dir = tmpdir_factory.mktemp("project")
+    check_dir.join("platformio.ini").write(DEFAULT_CONFIG)
+    check_dir.mkdir("src").join("main.c").write(TEST_CODE)
     check_dir.join("misra.json").write(
         """
 {
@@ -509,15 +512,18 @@ TEST-TEST-TEST-TEST
     assert "license information is incorrect" in verbose_result.output.lower()
 
 
-def test_check_embedded_platform_all_tools(clirunner, validate_cliresult, tmpdir):
-    config = """
+@pytest.mark.parametrize("framework", ["arduino", "stm32cube", "zephyr"])
+@pytest.mark.parametrize("check_tool", ["cppcheck", "clangtidy", "pvs-studio"])
+def test_check_embedded_platform_all_tools(
+    clirunner, validate_cliresult, tmpdir, framework, check_tool
+):
+    config = f"""
 [env:test]
 platform = ststm32
 board = nucleo_f401re
-framework = %s
-check_tool = %s
+framework = {framework}
+check_tool = {check_tool}
 """
-    # tmpdir.join("platformio.ini").write(config)
     tmpdir.mkdir("src").join("main.c").write(
         PVS_STUDIO_FREE_LICENSE_HEADER
         + """
@@ -534,20 +540,11 @@ int main() {
 """
     )
 
-    for framework in (
-        "arduino",
-        "stm32cube",
-        "zephyr",
-    ):
-        for tool in ("cppcheck", "clangtidy", "pvs-studio"):
-            tmpdir.join("platformio.ini").write(config % (framework, tool))
-            result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
-            validate_cliresult(result)
-            defects = sum(count_defects(result.output))
-            assert defects > 0, "Failed %s with %s" % (
-                framework,
-                tool,
-            )
+    tmpdir.join("platformio.ini").write(config)
+    result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
+    validate_cliresult(result)
+    defects = sum(count_defects(result.output))
+    assert defects > 0, "Not defects were found!"
 
 
 def test_check_skip_includes_from_packages(clirunner, validate_cliresult, tmpdir):
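Note (illustration, not from the diff): stacking two ``@pytest.mark.parametrize`` decorators runs the Cartesian product of their values, so the rewritten test above expands into 3 x 3 = 9 independent test cases instead of one test looping internally, and each failure is reported against its own (framework, check_tool) pair. A minimal self-contained sketch of the mechanism:

    import pytest

    @pytest.mark.parametrize("framework", ["arduino", "stm32cube", "zephyr"])
    @pytest.mark.parametrize("check_tool", ["cppcheck", "clangtidy", "pvs-studio"])
    def test_matrix(framework, check_tool):
        # collected 9 times, once per combination of the two parameter lists
        assert framework and check_tool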
@@ -19,7 +19,7 @@ import semantic_version
 
 from platformio.package.meta import (
     PackageCompatibility,
-    PackageMetaData,
+    PackageMetadata,
     PackageOutdatedResult,
     PackageSpec,
     PackageType,
@@ -229,7 +229,7 @@ def test_spec_as_dependency():
 
 
 def test_metadata_as_dict():
-    metadata = PackageMetaData(PackageType.LIBRARY, "foo", "1.2.3")
+    metadata = PackageMetadata(PackageType.LIBRARY, "foo", "1.2.3")
     # test setter
     metadata.version = "0.1.2+12345"
     assert metadata.version == semantic_version.Version("0.1.2+12345")
@@ -244,7 +244,7 @@ def test_metadata_as_dict():
     )
 
     assert not jsondiff.diff(
-        PackageMetaData(
+        PackageMetadata(
             PackageType.TOOL,
             "toolchain",
             "2.0.5",
@@ -267,7 +267,7 @@ def test_metadata_as_dict():
 
 def test_metadata_dump(tmpdir_factory):
     pkg_dir = tmpdir_factory.mktemp("package")
-    metadata = PackageMetaData(
+    metadata = PackageMetadata(
         PackageType.TOOL,
         "toolchain",
         "2.0.5",
@@ -297,9 +297,9 @@ def test_metadata_load(tmpdir_factory):
     pkg_dir = tmpdir_factory.mktemp("package")
     dst = pkg_dir.join(".piopm")
     dst.write(contents)
-    metadata = PackageMetaData.load(str(dst))
+    metadata = PackageMetadata.load(str(dst))
     assert metadata.version == semantic_version.Version("0.1.3")
-    assert metadata == PackageMetaData(
+    assert metadata == PackageMetadata(
         PackageType.PLATFORM,
         "foo",
         "0.1.3",
@@ -307,11 +307,11 @@ def test_metadata_load(tmpdir_factory):
     )
 
     piopm_path = pkg_dir.join(".piopm")
-    metadata = PackageMetaData(
+    metadata = PackageMetadata(
         PackageType.LIBRARY, "mylib", version="1.2.3", spec=PackageSpec("mylib")
     )
     metadata.dump(str(piopm_path))
-    restored_metadata = PackageMetaData.load(str(piopm_path))
+    restored_metadata = PackageMetadata.load(str(piopm_path))
     assert metadata == restored_metadata
 
 
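Note (not shown in this diff): the tests simply switch to the new ``PackageMetadata`` spelling. If third-party code still imported the old name, the usual way to keep it working would be a one-line backwards-compatibility alias such as ``PackageMetaData = PackageMetadata`` in ``platformio.package.meta``; whether the package actually keeps such an alias is not visible here.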