mirror of
https://github.com/platformio/platformio-core.git
synced 2025-12-23 15:18:03 +01:00
Compare commits
130 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3f3c8cabb8 | ||
|
|
34e12e575b | ||
|
|
4c8c261ab4 | ||
|
|
099bb3b9ff | ||
|
|
c623a6aacc | ||
|
|
ce7356794d | ||
|
|
523494f9cf | ||
|
|
0edc867d45 | ||
|
|
ce4c45a075 | ||
|
|
e29941e3eb | ||
|
|
86ce3595f6 | ||
|
|
6e958b8415 | ||
|
|
d485703768 | ||
|
|
109e2107d1 | ||
|
|
3469905365 | ||
|
|
75b3846f8f | ||
|
|
a9ec38208c | ||
|
|
c38b9a4144 | ||
|
|
b6128aeaa1 | ||
|
|
881782be05 | ||
|
|
0c05930501 | ||
|
|
b96f2a19b5 | ||
|
|
c1906714ee | ||
|
|
32181d1bd2 | ||
|
|
7dfb413d87 | ||
|
|
7934a96ad1 | ||
|
|
abddbf9c7d | ||
|
|
77e66241f7 | ||
|
|
4b3f2e19a4 | ||
|
|
b29c6485a8 | ||
|
|
f4dba7a68c | ||
|
|
2817408db3 | ||
|
|
9ff3c758eb | ||
|
|
3dcc189740 | ||
|
|
4a12d1954e | ||
|
|
e4d645110a | ||
|
|
01a32067d5 | ||
|
|
fc5ce4739c | ||
|
|
ae7b8f9ecf | ||
|
|
0f5d2d6821 | ||
|
|
48eca22a00 | ||
|
|
5e164493a8 | ||
|
|
ead99208f2 | ||
|
|
4f5ad05792 | ||
|
|
bc52e72605 | ||
|
|
038674835a | ||
|
|
00f21c17ca | ||
|
|
818a1508a0 | ||
|
|
2d9480a6a7 | ||
|
|
0bec4e25c8 | ||
|
|
950a540df4 | ||
|
|
2e66c5f807 | ||
|
|
7033c2616b | ||
|
|
7292024ee6 | ||
|
|
8d4cde4534 | ||
|
|
d6df6cbb5d | ||
|
|
344e94d8a1 | ||
|
|
5cf73a9165 | ||
|
|
96b1a1c79c | ||
|
|
0bbe7f8c73 | ||
|
|
e333bb1cca | ||
|
|
454cd8d784 | ||
|
|
743a43ae17 | ||
|
|
5a1b0e19b2 | ||
|
|
da6cde5cbd | ||
|
|
5ea864da39 | ||
|
|
175448deda | ||
|
|
16f90dd821 | ||
|
|
9efac669e6 | ||
|
|
adf9ba29df | ||
|
|
cacddb9abb | ||
|
|
edbe213410 | ||
|
|
891f78be37 | ||
|
|
175be346a8 | ||
|
|
9ae981614f | ||
|
|
16f5f3ef46 | ||
|
|
2cd19b0273 | ||
|
|
e158e54a26 | ||
|
|
63a6fe9133 | ||
|
|
779eaee310 | ||
|
|
0ecfe8105f | ||
|
|
b8cc867ba4 | ||
|
|
7230556d1b | ||
|
|
afd79f4655 | ||
|
|
5d87fb8757 | ||
|
|
23e9596506 | ||
|
|
428f46fafe | ||
|
|
ee847e03a6 | ||
|
|
a870981266 | ||
|
|
411bf1107d | ||
|
|
5b74c8a942 | ||
|
|
a24bab0a27 | ||
|
|
1cb7764b0e | ||
|
|
d835f52a18 | ||
|
|
9c20ab81cb | ||
|
|
14de3e79c5 | ||
|
|
21c12030d5 | ||
|
|
2370e16f1b | ||
|
|
a384411a28 | ||
|
|
1e0ca8f79c | ||
|
|
2b5e590819 | ||
|
|
bf57b777bf | ||
|
|
f656d19ed5 | ||
|
|
eb09af06ed | ||
|
|
687c339f20 | ||
|
|
7bc170a53e | ||
|
|
65297c24d4 | ||
|
|
ea21f3fba0 | ||
|
|
b515a004d3 | ||
|
|
7d3fc1ec1a | ||
|
|
6987d6c1c6 | ||
|
|
de2b5ea905 | ||
|
|
f946a0bc08 | ||
|
|
4f47ca5742 | ||
|
|
54b51fc2fd | ||
|
|
1f284e853d | ||
|
|
2a30ad0fdf | ||
|
|
c454ae336d | ||
|
|
cd59c829e0 | ||
|
|
429f416b38 | ||
|
|
0a881d582d | ||
|
|
65b1029216 | ||
|
|
c7758fd30e | ||
|
|
46f300d62f | ||
|
|
4234dfb6f9 | ||
|
|
9695720343 | ||
|
|
1f28056459 | ||
|
|
7dacceef04 | ||
|
|
39883e8d68 | ||
|
|
949ef2c48a |
9
.github/workflows/examples.yml
vendored
9
.github/workflows/examples.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
||||
with:
|
||||
submodules: "recursive"
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v1
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
@@ -26,7 +26,8 @@ jobs:
|
||||
- name: Run on Linux
|
||||
if: startsWith(matrix.os, 'ubuntu')
|
||||
env:
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51,aceinna_imu"
|
||||
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51"
|
||||
run: |
|
||||
# ChipKIT issue: install 32-bit support for GCC PIC32
|
||||
sudo apt-get install libc6-i386
|
||||
@@ -40,7 +41,8 @@ jobs:
|
||||
- name: Run on macOS
|
||||
if: startsWith(matrix.os, 'macos')
|
||||
env:
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,gd32v,nuclei"
|
||||
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,lattice_ice40,gd32v"
|
||||
run: |
|
||||
df -h
|
||||
tox -e testexamples
|
||||
@@ -50,6 +52,7 @@ jobs:
|
||||
env:
|
||||
PLATFORMIO_CORE_DIR: C:/pio
|
||||
PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
|
||||
PIO_INSTALL_DEVPLATFORMS_OWNERNAMES: "platformio"
|
||||
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,riscv_gap"
|
||||
run: |
|
||||
tox -e testexamples
|
||||
|
||||
56
HISTORY.rst
56
HISTORY.rst
@@ -8,11 +8,62 @@ PlatformIO Core 5
|
||||
|
||||
**A professional collaborative platform for embedded development**
|
||||
|
||||
- `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/core/migration.html>`__
|
||||
5.0.4 (2020-12-30)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Added "Core" suffix when showing PlatformIO Core version using ``pio --version`` command
|
||||
- Improved ``.ccls`` configuration file for Emacs, Vim, and Sublime Text integrations
|
||||
- Updated analysis tools:
|
||||
|
||||
* `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.3 with improved C++ parser and several new MISRA rules
|
||||
* `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.11 with new diagnostics and updated mass suppression mechanism
|
||||
|
||||
- Show a warning message about deprecated support for Python 2 and Python 3.5
|
||||
- Do not provide "intelliSenseMode" option when generating configuration for VSCode C/C++ extension
|
||||
- Fixed a "git-sh-setup: file not found" error when installing project dependencies from Git VCS (`issue #3740 <https://github.com/platformio/platformio-core/issues/3740>`_)
|
||||
- Fixed an issue with package publishing on Windows when Unix permissions are not preserved (`issue #3776 <https://github.com/platformio/platformio-core/issues/3776>`_)
|
||||
|
||||
5.0.3 (2020-11-12)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Added an error selector for `Sublime Text <https://docs.platformio.org/page/integration/ide/sublimetext.html>`__ build runner (`issue #3733 <https://github.com/platformio/platformio-core/issues/3733>`_)
|
||||
- Generate a working "projectEnvName" for PlatformIO IDE's debugger for VSCode
|
||||
- Force VSCode's intelliSenseMode to "gcc-x64" when GCC toolchain is used
|
||||
- Print ignored test suites and environments in the test summary report only in verbose mode (`issue #3726 <https://github.com/platformio/platformio-core/issues/3726>`_)
|
||||
- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
|
||||
- Fixed an issue when `pio package pack <https://docs.platformio.org/page/core/userguide/package/cmd_pack.html>`__ ignores some folders (`issue #3730 <https://github.com/platformio/platformio-core/issues/3730>`_)
|
||||
|
||||
5.0.2 (2020-10-30)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Initialize a new project or update the existing passing working environment name and its options (`issue #3686 <https://github.com/platformio/platformio-core/issues/3686>`_)
|
||||
- Automatically build PlatformIO Core extra Python dependencies on a host machine if they are missed in the registry (`issue #3700 <https://github.com/platformio/platformio-core/issues/3700>`_)
|
||||
- Improved "core.call" RPC for PlatformIO Home (`issue #3671 <https://github.com/platformio/platformio-core/issues/3671>`_)
|
||||
- Fixed a "PermissionError: [WinError 5]" on Windows when an external repository is used with `lib_deps <https://docs.platformio.org/page/projectconf/section_env_library.html#lib-deps>`__ option (`issue #3664 <https://github.com/platformio/platformio-core/issues/3664>`_)
|
||||
- Fixed a "KeyError: 'versions'" when dependency does not exist in the registry (`issue #3666 <https://github.com/platformio/platformio-core/issues/3666>`_)
|
||||
- Fixed an issue with GCC linker when "native" dev-platform is used in pair with library dependencies (`issue #3669 <https://github.com/platformio/platformio-core/issues/3669>`_)
|
||||
- Fixed an "AssertionError: ensure_dir_exists" when checking library updates from simultaneous subprocesses (`issue #3677 <https://github.com/platformio/platformio-core/issues/3677>`_)
|
||||
- Fixed an issue when `pio package publish <https://docs.platformio.org/page/core/userguide/package/cmd_publish.html>`__ command removes original archive after submitting to the registry (`issue #3716 <https://github.com/platformio/platformio-core/issues/3716>`_)
|
||||
- Fixed an issue when multiple `pio lib install <https://docs.platformio.org/page/core/userguide/lib/cmd_install.html>`__ command with the same local library results in duplicates in ``lib_deps`` (`issue #3715 <https://github.com/platformio/platformio-core/issues/3715>`_)
|
||||
- Fixed an issue with a "wrong" timestamp in device monitor output using `"time" filter <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #3712 <https://github.com/platformio/platformio-core/issues/3712>`_)
|
||||
|
||||
5.0.1 (2020-09-10)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Added support for "owner" requirement when declaring ``dependencies`` using `library.json <https://docs.platformio.org/page/librarymanager/config.html#dependencies>`__
|
||||
- Fixed an issue when using a custom git/ssh package with `platform_packages <https://docs.platformio.org/page/projectconf/section_env_platform.html#platform-packages>`__ option (`issue #3624 <https://github.com/platformio/platformio-core/issues/3624>`_)
|
||||
- Fixed an issue with "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using `Remote Development <https://docs.platformio.org/page/plus/pio-remote.html>`__ on RaspberryPi device (`issue #3652 <https://github.com/platformio/platformio-core/issues/3652>`_)
|
||||
- Fixed an issue when `pio package unpublish <https://docs.platformio.org/page/core/userguide/package/cmd_unpublish.html>`__ command crashes (`issue #3660 <https://github.com/platformio/platformio-core/issues/3660>`_)
|
||||
- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
|
||||
- Fixed an issue with incorrect value for C++ language standard in IDE projects when an in-progress language standard is used (`issue #3653 <https://github.com/platformio/platformio-core/issues/3653>`_)
|
||||
- Fixed an issue with "Invalid simple block (semantic_version)" from library dependency that refs to an external source (repository, ZIP/Tar archives) (`issue #3658 <https://github.com/platformio/platformio-core/issues/3658>`_)
|
||||
- Fixed an issue when can not remove update or remove external dev-platform using PlatformIO Home (`issue #3663 <https://github.com/platformio/platformio-core/issues/3663>`_)
|
||||
|
||||
5.0.0 (2020-09-03)
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/core/migration.html>`__.
|
||||
|
||||
* Integration with the new **PlatformIO Trusted Registry**
|
||||
|
||||
- Enterprise-grade package storage with high availability (multi replicas)
|
||||
@@ -92,7 +143,8 @@ PlatformIO Core 5
|
||||
- Display system-wide information using a new `pio system info <https://docs.platformio.org/page/core/userguide/system/cmd_info.html>`__ command (`issue #3521 <https://github.com/platformio/platformio-core/issues/3521>`_)
|
||||
- Remove unused data using a new `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command (`issue #3522 <https://github.com/platformio/platformio-core/issues/3522>`_)
|
||||
- Show ignored project environments only in the verbose mode (`issue #3641 <https://github.com/platformio/platformio-core/issues/3641>`_)
|
||||
- Do not escape compiler arguments in VSCode template on Windows.
|
||||
- Do not escape compiler arguments in VSCode template on Windows
|
||||
- Drop support for Python 2 and 3.5.
|
||||
|
||||
.. _release_notes_4:
|
||||
|
||||
|
||||
3
Makefile
3
Makefile
@@ -31,5 +31,8 @@ profile:
|
||||
python -m cProfile -o .tox/.tmp/cprofile.prof -m platformio ${PIOARGS}
|
||||
snakeviz .tox/.tmp/cprofile.prof
|
||||
|
||||
pack:
|
||||
python setup.py sdist
|
||||
|
||||
publish:
|
||||
python setup.py sdist upload
|
||||
|
||||
30
README.rst
30
README.rst
@@ -16,23 +16,21 @@ PlatformIO
|
||||
.. image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: License
|
||||
.. image:: https://img.shields.io/badge/PlatformIO-Community-orange.svg
|
||||
:alt: Community Forums
|
||||
:target: https://community.platformio.org?utm_source=github&utm_medium=core
|
||||
.. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
|
||||
:alt: Community Labs
|
||||
:target: https://piolabs.com/?utm_source=github&utm_medium=core
|
||||
|
||||
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
|
||||
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
|
||||
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
|
||||
`Contact Us <https://piolabs.com/?utm_source=github&utm_medium=core>`_
|
||||
|
||||
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
**Social:** `LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
`Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`Facebook <https://www.facebook.com/platformio>`_ |
|
||||
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
|
||||
`Bintray <https://bintray.com/platformio>`_ |
|
||||
`Community <https://community.platformio.org?utm_source=github&utm_medium=core>`_
|
||||
`Community Forums <https://community.platformio.org?utm_source=github&utm_medium=core>`_
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
|
||||
:target: https://platformio.org?utm_source=github&utm_medium=core
|
||||
@@ -51,20 +49,18 @@ Get Started
|
||||
-----------
|
||||
|
||||
* `What is PlatformIO? <https://docs.platformio.org/page/what-is-platformio.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Instruments
|
||||
-----------
|
||||
|
||||
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO Core (CLI) <https://docs.platformio.org/page/core.html?utm_source=github&utm_medium=core>`_
|
||||
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
|
||||
|
||||
Solutions
|
||||
---------
|
||||
|
||||
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
|
||||
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Professional
|
||||
------------
|
||||
**Advanced**
|
||||
|
||||
* `Debugging <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
2
docs
2
docs
Submodule docs updated: 03a83c996f...9db46dccef
2
examples
2
examples
Submodule examples updated: 84855946ea...161ae7302b
@@ -14,7 +14,7 @@
|
||||
|
||||
import sys
|
||||
|
||||
VERSION = (5, 0, 0)
|
||||
VERSION = (5, 0, 4)
|
||||
__version__ = ".".join([str(s) for s in VERSION])
|
||||
|
||||
__title__ = "platformio"
|
||||
@@ -31,11 +31,11 @@ __description__ = (
|
||||
)
|
||||
__url__ = "https://platformio.org"
|
||||
|
||||
__author__ = "PlatformIO"
|
||||
__email__ = "contact@platformio.org"
|
||||
__author__ = "PlatformIO Labs"
|
||||
__email__ = "contact@piolabs.com"
|
||||
|
||||
__license__ = "Apache Software License"
|
||||
__copyright__ = "Copyright 2014-present PlatformIO"
|
||||
__copyright__ = "Copyright 2014-present PlatformIO Labs"
|
||||
|
||||
__accounts_api__ = "https://api.accounts.platformio.org"
|
||||
__registry_api__ = [
|
||||
@@ -47,18 +47,17 @@ __pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
|
||||
__default_requests_timeout__ = (10, None) # (connect, read)
|
||||
|
||||
__core_packages__ = {
|
||||
"contrib-piohome": "~3.3.0",
|
||||
"contrib-piohome": "~3.3.1",
|
||||
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
|
||||
"tool-unity": "~1.20500.0",
|
||||
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
|
||||
"tool-cppcheck": "~1.210.0",
|
||||
"tool-cppcheck": "~1.230.0",
|
||||
"tool-clangtidy": "~1.100000.0",
|
||||
"tool-pvs-studio": "~7.9.0",
|
||||
"tool-pvs-studio": "~7.11.0",
|
||||
}
|
||||
|
||||
__check_internet_hosts__ = [
|
||||
"140.82.118.3", # Github.com
|
||||
"35.231.145.151", # Gitlab.com
|
||||
"185.199.110.153", # Github.com
|
||||
"88.198.170.159", # platformio.org
|
||||
"github.com",
|
||||
"platformio.org",
|
||||
|
||||
@@ -33,7 +33,7 @@ except: # pylint: disable=bare-except
|
||||
@click.command(
|
||||
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
|
||||
)
|
||||
@click.version_option(__version__, prog_name="PlatformIO")
|
||||
@click.version_option(__version__, prog_name="PlatformIO Core")
|
||||
@click.option("--force", "-f", is_flag=True, help="DEPRECATE")
|
||||
@click.option("--caller", "-c", help="Caller ID (service)")
|
||||
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
|
||||
|
||||
@@ -255,6 +255,8 @@ def get_cid():
|
||||
uid = None
|
||||
if os.getenv("C9_UID"):
|
||||
uid = os.getenv("C9_UID")
|
||||
elif os.getenv("GITPOD_GIT_USER_NAME"):
|
||||
uid = os.getenv("GITPOD_GIT_USER_NAME")
|
||||
elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
|
||||
try:
|
||||
uid = json.loads(
|
||||
|
||||
@@ -78,6 +78,7 @@ DEFAULT_ENV_OPTIONS = dict(
|
||||
PROGNAME="program",
|
||||
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PYTHONEXE=get_pythonexe_path(),
|
||||
IDE_EXTRA_DATA={},
|
||||
)
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
|
||||
@@ -93,7 +93,9 @@ def _dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in processDefines(env.get("CPPDEFINES", [])):
|
||||
defines.append(env.subst(item).replace("\\", ""))
|
||||
item = item.strip()
|
||||
if item:
|
||||
defines.append(env.subst(item).replace("\\", ""))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env["PIOPLATFORM"] == "atmelavr":
|
||||
@@ -164,14 +166,17 @@ def DumpIDEData(env, globalenv):
|
||||
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
"prog_path": env.subst("$PROG_PATH"),
|
||||
"flash_extra_images": [
|
||||
{"offset": item[0], "path": env.subst(item[1])}
|
||||
for item in env.get("FLASH_EXTRA_IMAGES", [])
|
||||
],
|
||||
"svd_path": _get_svd_path(env),
|
||||
"compiler_type": env.GetCompilerType(),
|
||||
"targets": globalenv.DumpTargets(),
|
||||
"extra": dict(
|
||||
flash_images=[
|
||||
{"offset": item[0], "path": env.subst(item[1])}
|
||||
for item in env.get("FLASH_EXTRA_IMAGES", [])
|
||||
]
|
||||
),
|
||||
}
|
||||
data["extra"].update(env.get("IDE_EXTRA_DATA", {}))
|
||||
|
||||
env_ = env.Clone()
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
|
||||
@@ -27,7 +27,7 @@ from SCons.Script import Export # pylint: disable=import-error
|
||||
from SCons.Script import SConscript # pylint: disable=import-error
|
||||
|
||||
from platformio import __version__, fs
|
||||
from platformio.compat import string_types
|
||||
from platformio.compat import MACOS, string_types
|
||||
from platformio.package.version import pepver_to_semver
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
@@ -69,7 +69,7 @@ def BuildProgram(env):
|
||||
if (
|
||||
env.get("LIBS")
|
||||
and env.GetCompilerType() == "gcc"
|
||||
and env.PioPlatform().is_embedded()
|
||||
and (env.PioPlatform().is_embedded() or not MACOS)
|
||||
):
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
@@ -80,7 +80,9 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
)
|
||||
|
||||
data = self.fetch_json_data(
|
||||
"post", "/v1/login", data={"username": username, "password": password},
|
||||
"post",
|
||||
"/v1/login",
|
||||
data={"username": username, "password": password},
|
||||
)
|
||||
app.set_state_item("account", data)
|
||||
return data
|
||||
@@ -108,7 +110,9 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
self.delete_local_session()
|
||||
try:
|
||||
self.fetch_json_data(
|
||||
"post", "/v1/logout", data={"refresh_token": refresh_token},
|
||||
"post",
|
||||
"/v1/logout",
|
||||
data={"refresh_token": refresh_token},
|
||||
)
|
||||
except AccountError:
|
||||
pass
|
||||
@@ -153,15 +157,26 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
).get("auth_token")
|
||||
|
||||
def forgot_password(self, username):
|
||||
return self.fetch_json_data("post", "/v1/forgot", data={"username": username},)
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/forgot",
|
||||
data={"username": username},
|
||||
)
|
||||
|
||||
def get_profile(self):
|
||||
return self.send_auth_request("get", "/v1/profile",)
|
||||
return self.send_auth_request(
|
||||
"get",
|
||||
"/v1/profile",
|
||||
)
|
||||
|
||||
def update_profile(self, profile, current_password):
|
||||
profile["current_password"] = current_password
|
||||
self.delete_local_state("summary")
|
||||
response = self.send_auth_request("put", "/v1/profile", data=profile,)
|
||||
response = self.send_auth_request(
|
||||
"put",
|
||||
"/v1/profile",
|
||||
data=profile,
|
||||
)
|
||||
return response
|
||||
|
||||
def get_account_info(self, offline=False):
|
||||
@@ -178,7 +193,10 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
"username": account.get("username"),
|
||||
}
|
||||
}
|
||||
result = self.send_auth_request("get", "/v1/summary",)
|
||||
result = self.send_auth_request(
|
||||
"get",
|
||||
"/v1/summary",
|
||||
)
|
||||
account["summary"] = dict(
|
||||
profile=result.get("profile"),
|
||||
packages=result.get("packages"),
|
||||
@@ -203,7 +221,10 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
return self.send_auth_request("get", "/v1/orgs/%s" % orgname)
|
||||
|
||||
def list_orgs(self):
|
||||
return self.send_auth_request("get", "/v1/orgs",)
|
||||
return self.send_auth_request(
|
||||
"get",
|
||||
"/v1/orgs",
|
||||
)
|
||||
|
||||
def update_org(self, orgname, data):
|
||||
return self.send_auth_request(
|
||||
@@ -211,19 +232,29 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
)
|
||||
|
||||
def destroy_org(self, orgname):
|
||||
return self.send_auth_request("delete", "/v1/orgs/%s" % orgname,)
|
||||
return self.send_auth_request(
|
||||
"delete",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
)
|
||||
|
||||
def add_org_owner(self, orgname, username):
|
||||
return self.send_auth_request(
|
||||
"post", "/v1/orgs/%s/owners" % orgname, data={"username": username},
|
||||
"post",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
data={"username": username},
|
||||
)
|
||||
|
||||
def list_org_owners(self, orgname):
|
||||
return self.send_auth_request("get", "/v1/orgs/%s/owners" % orgname,)
|
||||
return self.send_auth_request(
|
||||
"get",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
)
|
||||
|
||||
def remove_org_owner(self, orgname, username):
|
||||
return self.send_auth_request(
|
||||
"delete", "/v1/orgs/%s/owners" % orgname, data={"username": username},
|
||||
"delete",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
data={"username": username},
|
||||
)
|
||||
|
||||
def create_team(self, orgname, teamname, description):
|
||||
@@ -235,16 +266,21 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
|
||||
def destroy_team(self, orgname, teamname):
|
||||
return self.send_auth_request(
|
||||
"delete", "/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
)
|
||||
|
||||
def get_team(self, orgname, teamname):
|
||||
return self.send_auth_request(
|
||||
"get", "/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
"get",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
)
|
||||
|
||||
def list_teams(self, orgname):
|
||||
return self.send_auth_request("get", "/v1/orgs/%s/teams" % orgname,)
|
||||
return self.send_auth_request(
|
||||
"get",
|
||||
"/v1/orgs/%s/teams" % orgname,
|
||||
)
|
||||
|
||||
def update_team(self, orgname, teamname, data):
|
||||
return self.send_auth_request(
|
||||
|
||||
@@ -133,9 +133,7 @@ class HTTPClient(object):
|
||||
def fetch_json_data(self, method, path, **kwargs):
|
||||
cache_valid = kwargs.pop("cache_valid") if "cache_valid" in kwargs else None
|
||||
if not cache_valid:
|
||||
return self.raise_error_from_response(
|
||||
self.send_request(method, path, **kwargs)
|
||||
)
|
||||
return self._parse_json_response(self.send_request(method, path, **kwargs))
|
||||
cache_key = ContentCache.key_from_args(
|
||||
method, path, kwargs.get("params"), kwargs.get("data")
|
||||
)
|
||||
@@ -144,11 +142,12 @@ class HTTPClient(object):
|
||||
if result is not None:
|
||||
return json.loads(result)
|
||||
response = self.send_request(method, path, **kwargs)
|
||||
data = self._parse_json_response(response)
|
||||
cc.set(cache_key, response.text, cache_valid)
|
||||
return self.raise_error_from_response(response)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def raise_error_from_response(response, expected_codes=(200, 201, 202)):
|
||||
def _parse_json_response(response, expected_codes=(200, 201, 202)):
|
||||
if response.status_code in expected_codes:
|
||||
try:
|
||||
return response.json()
|
||||
|
||||
@@ -70,12 +70,16 @@ class RegistryClient(HTTPClient):
|
||||
if version:
|
||||
path += "/" + version
|
||||
return self.send_auth_request(
|
||||
"delete", path, params={"undo": 1 if undo else 0},
|
||||
"delete",
|
||||
path,
|
||||
params={"undo": 1 if undo else 0},
|
||||
)
|
||||
|
||||
def update_resource(self, urn, private):
|
||||
return self.send_auth_request(
|
||||
"put", "/v3/resources/%s" % urn, data={"private": int(private)},
|
||||
"put",
|
||||
"/v3/resources/%s" % urn,
|
||||
data={"private": int(private)},
|
||||
)
|
||||
|
||||
def grant_access_for_resource(self, urn, client, level):
|
||||
@@ -87,7 +91,9 @@ class RegistryClient(HTTPClient):
|
||||
|
||||
def revoke_access_from_resource(self, urn, client):
|
||||
return self.send_auth_request(
|
||||
"delete", "/v3/resources/%s/access" % urn, data={"client": client},
|
||||
"delete",
|
||||
"/v3/resources/%s/access" % urn,
|
||||
data={"client": client},
|
||||
)
|
||||
|
||||
def list_resources(self, owner):
|
||||
@@ -136,6 +142,6 @@ class RegistryClient(HTTPClient):
|
||||
cache_valid="1h",
|
||||
)
|
||||
except HTTPClientError as e:
|
||||
if e.response.status_code == 404:
|
||||
if e.response is not None and e.response.status_code == 404:
|
||||
return None
|
||||
raise e
|
||||
|
||||
@@ -47,27 +47,31 @@ def validate_urn(value):
|
||||
|
||||
@cli.command("public", short_help="Make resource public")
|
||||
@click.argument(
|
||||
"urn", callback=lambda _, __, value: validate_urn(value),
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_public(urn, urn_type):
|
||||
client = RegistryClient()
|
||||
client.update_resource(urn=urn, private=0)
|
||||
return click.secho(
|
||||
"The resource %s has been successfully updated." % urn, fg="green",
|
||||
"The resource %s has been successfully updated." % urn,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("private", short_help="Make resource private")
|
||||
@click.argument(
|
||||
"urn", callback=lambda _, __, value: validate_urn(value),
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_private(urn, urn_type):
|
||||
client = RegistryClient()
|
||||
client.update_resource(urn=urn, private=1)
|
||||
return click.secho(
|
||||
"The resource %s has been successfully updated." % urn, fg="green",
|
||||
"The resource %s has been successfully updated." % urn,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@@ -79,14 +83,16 @@ def access_private(urn, urn_type):
|
||||
callback=lambda _, __, value: validate_client(value),
|
||||
)
|
||||
@click.argument(
|
||||
"urn", callback=lambda _, __, value: validate_urn(value),
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_grant(level, client, urn, urn_type):
|
||||
reg_client = RegistryClient()
|
||||
reg_client.grant_access_for_resource(urn=urn, client=client, level=level)
|
||||
return click.secho(
|
||||
"Access for resource %s has been granted for %s" % (urn, client), fg="green",
|
||||
"Access for resource %s has been granted for %s" % (urn, client),
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@@ -97,14 +103,16 @@ def access_grant(level, client, urn, urn_type):
|
||||
callback=lambda _, __, value: validate_client(value),
|
||||
)
|
||||
@click.argument(
|
||||
"urn", callback=lambda _, __, value: validate_urn(value),
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_revoke(client, urn, urn_type):
|
||||
reg_client = RegistryClient()
|
||||
reg_client.revoke_access_from_resource(urn=urn, client=client)
|
||||
return click.secho(
|
||||
"Access for resource %s has been revoked for %s" % (urn, client), fg="green",
|
||||
"Access for resource %s has been revoked for %s" % (urn, client),
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -192,7 +192,10 @@ def account_destroy():
|
||||
client.logout()
|
||||
except AccountNotAuthorized:
|
||||
pass
|
||||
return click.secho("User account has been destroyed.", fg="green",)
|
||||
return click.secho(
|
||||
"User account has been destroyed.",
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("show", short_help="PlatformIO Account information")
|
||||
|
||||
@@ -167,6 +167,29 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
if os.path.isfile(f):
|
||||
os.remove(f)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return cmd_result["returncode"] == 0
|
||||
|
||||
def execute_check_cmd(self, cmd):
|
||||
result = proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
|
||||
if not self.is_check_successful(result):
|
||||
click.echo(
|
||||
"\nError: Failed to execute check command! Exited with code %d."
|
||||
% result["returncode"]
|
||||
)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_project_target_files(patterns):
|
||||
c_extension = (".c",)
|
||||
@@ -200,11 +223,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
else:
|
||||
if self.options.get("verbose"):
|
||||
|
||||
@@ -49,6 +49,12 @@ class ClangtidyCheckTool(CheckToolBase):
|
||||
|
||||
return DefectItem(severity, category, message, file_, line, column, defect_id)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Note: Clang-Tidy returns 1 for not critical compilation errors,
|
||||
# so 0 and 1 are only acceptable values
|
||||
return cmd_result["returncode"] < 2
|
||||
|
||||
def configure_command(self):
|
||||
tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy")
|
||||
|
||||
|
||||
@@ -109,7 +109,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
cmd = [
|
||||
tool_path,
|
||||
"--addon-python=%s" % proc.get_pythonexe_path(),
|
||||
"--error-exitcode=1",
|
||||
"--error-exitcode=3",
|
||||
"--verbose" if self.options.get("verbose") else "--quiet",
|
||||
]
|
||||
|
||||
@@ -220,6 +220,11 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if os.path.isfile(dump_file):
|
||||
os.remove(dump_file)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Cppcheck is configured to return '3' if a defect is found
|
||||
return cmd_result["returncode"] in (0, 3)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
project_files = self.get_project_target_files(self.options["patterns"])
|
||||
@@ -238,11 +243,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
self.clean_up()
|
||||
|
||||
|
||||
@@ -52,6 +52,11 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
)
|
||||
)
|
||||
|
||||
def tool_output_filter(self, line):
|
||||
if "license was not entered" in line.lower():
|
||||
self._bad_input = True
|
||||
return line
|
||||
|
||||
def _process_defects(self, defects):
|
||||
for defect in defects:
|
||||
if not isinstance(defect, DefectItem):
|
||||
@@ -203,6 +208,12 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
if os.path.isdir(self._tmp_dir):
|
||||
shutil.rmtree(self._tmp_dir)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return (
|
||||
"license" not in cmd_result["err"].lower() and cmd_result["returncode"] == 0
|
||||
)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
for scope, files in self.get_project_target_files(
|
||||
@@ -219,11 +230,8 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
self._bad_input = True
|
||||
continue
|
||||
|
||||
result = proc.exec_command(cmd)
|
||||
# pylint: disable=unsupported-membership-test
|
||||
if result["returncode"] != 0 or "license" in result["err"].lower():
|
||||
self._bad_input = True
|
||||
click.echo(result["err"])
|
||||
result = self.execute_check_cmd(cmd)
|
||||
if result["returncode"] != 0:
|
||||
continue
|
||||
|
||||
self._process_defects(self.parse_defects(self._tmp_output_file))
|
||||
|
||||
@@ -24,7 +24,10 @@ import click
|
||||
from platformio import app, exception, fs, proc
|
||||
from platformio.commands.debug import helpers
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.platform import platform_install as cmd_platform_install
|
||||
from platformio.package.manager.core import inject_contrib_pysite
|
||||
from platformio.platform.exception import UnknownPlatform
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectEnvsNotAvailableError
|
||||
from platformio.project.helpers import is_platformio_project, load_project_ide_data
|
||||
@@ -73,18 +76,29 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
|
||||
env_options = config.items(env=env_name, as_dict=True)
|
||||
if not set(env_options.keys()) >= set(["platform", "board"]):
|
||||
raise ProjectEnvsNotAvailableError()
|
||||
debug_options = helpers.validate_debug_options(ctx, env_options)
|
||||
|
||||
try:
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
except UnknownPlatform:
|
||||
ctx.invoke(
|
||||
cmd_platform_install,
|
||||
platforms=[env_options["platform"]],
|
||||
skip_default_package=True,
|
||||
)
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
|
||||
debug_options = helpers.configure_initial_debug_options(platform, env_options)
|
||||
assert debug_options
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
|
||||
|
||||
configuration = load_project_ide_data(project_dir, env_name)
|
||||
if not configuration:
|
||||
raise DebugInvalidOptionsError("Could not load debug configuration")
|
||||
ide_data = load_project_ide_data(project_dir, env_name)
|
||||
if not ide_data:
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
|
||||
if "--version" in __unprocessed:
|
||||
result = proc.exec_command([configuration["gdb_path"], "--version"])
|
||||
result = proc.exec_command([ide_data["gdb_path"], "--version"])
|
||||
if result["returncode"] == 0:
|
||||
return click.echo(result["out"])
|
||||
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
|
||||
@@ -99,23 +113,25 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
|
||||
nl=False,
|
||||
)
|
||||
|
||||
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
|
||||
debug_options, configuration
|
||||
)
|
||||
try:
|
||||
debug_options = platform.configure_debug_options(debug_options, ide_data)
|
||||
except NotImplementedError:
|
||||
# legacy for ESP32 dev-platform <=2.0.0
|
||||
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
|
||||
debug_options, ide_data
|
||||
)
|
||||
|
||||
rebuild_prog = False
|
||||
preload = debug_options["load_cmds"] == ["preload"]
|
||||
load_mode = debug_options["load_mode"]
|
||||
if load_mode == "always":
|
||||
rebuild_prog = preload or not helpers.has_debug_symbols(
|
||||
configuration["prog_path"]
|
||||
)
|
||||
rebuild_prog = preload or not helpers.has_debug_symbols(ide_data["prog_path"])
|
||||
elif load_mode == "modified":
|
||||
rebuild_prog = helpers.is_prog_obsolete(
|
||||
configuration["prog_path"]
|
||||
) or not helpers.has_debug_symbols(configuration["prog_path"])
|
||||
ide_data["prog_path"]
|
||||
) or not helpers.has_debug_symbols(ide_data["prog_path"])
|
||||
else:
|
||||
rebuild_prog = not isfile(configuration["prog_path"])
|
||||
rebuild_prog = not isfile(ide_data["prog_path"])
|
||||
|
||||
if preload or (not rebuild_prog and load_mode != "always"):
|
||||
# don't load firmware through debug server
|
||||
@@ -139,9 +155,9 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
|
||||
|
||||
# save SHA sum of newly created prog
|
||||
if load_mode == "modified":
|
||||
helpers.is_prog_obsolete(configuration["prog_path"])
|
||||
helpers.is_prog_obsolete(ide_data["prog_path"])
|
||||
|
||||
if not isfile(configuration["prog_path"]):
|
||||
if not isfile(ide_data["prog_path"]):
|
||||
raise DebugInvalidOptionsError("Program/firmware is missed")
|
||||
|
||||
# run debugging client
|
||||
@@ -151,7 +167,7 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
|
||||
from platformio.commands.debug.process.client import GDBClient, reactor
|
||||
|
||||
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
|
||||
client.spawn(configuration["gdb_path"], configuration["prog_path"])
|
||||
client.spawn(ide_data["gdb_path"], ide_data["prog_path"])
|
||||
|
||||
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
|
||||
reactor.run()
|
||||
|
||||
@@ -23,11 +23,8 @@ from os.path import isfile
|
||||
from platformio import fs, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.platform import platform_install as cmd_platform_install
|
||||
from platformio.commands.run.command import cli as cmd_run
|
||||
from platformio.compat import is_bytes
|
||||
from platformio.platform.exception import UnknownPlatform
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
@@ -89,21 +86,11 @@ def predebug_project(ctx, project_dir, env_name, preload, verbose):
|
||||
time.sleep(5)
|
||||
|
||||
|
||||
def validate_debug_options(cmd_ctx, env_options):
|
||||
def configure_initial_debug_options(platform, env_options):
|
||||
def _cleanup_cmds(items):
|
||||
items = ProjectConfig.parse_multi_values(items)
|
||||
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
|
||||
|
||||
try:
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
except UnknownPlatform:
|
||||
cmd_ctx.invoke(
|
||||
cmd_platform_install,
|
||||
platforms=[env_options["platform"]],
|
||||
skip_default_package=True,
|
||||
)
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
|
||||
board_config = platform.board_config(env_options["board"])
|
||||
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
|
||||
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
|
||||
@@ -195,13 +182,16 @@ def validate_debug_options(cmd_ctx, env_options):
|
||||
|
||||
|
||||
def configure_esp32_load_cmds(debug_options, configuration):
|
||||
"""
|
||||
DEPRECATED: Moved to ESP32 dev-platform
|
||||
See platform.py::configure_debug_options
|
||||
"""
|
||||
flash_images = configuration.get("extra", {}).get("flash_images")
|
||||
ignore_conds = [
|
||||
debug_options["load_cmds"] != ["load"],
|
||||
"xtensa-esp32" not in configuration.get("cc_path", ""),
|
||||
not configuration.get("flash_extra_images"),
|
||||
not all(
|
||||
[isfile(item["path"]) for item in configuration.get("flash_extra_images")]
|
||||
),
|
||||
not flash_images,
|
||||
not all([isfile(item["path"]) for item in flash_images]),
|
||||
]
|
||||
if any(ignore_conds):
|
||||
return debug_options["load_cmds"]
|
||||
@@ -210,7 +200,7 @@ def configure_esp32_load_cmds(debug_options, configuration):
|
||||
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
|
||||
path=fs.to_unix_path(item["path"]), offset=item["offset"]
|
||||
)
|
||||
for item in configuration.get("flash_extra_images")
|
||||
for item in flash_images
|
||||
]
|
||||
mon_cmds.append(
|
||||
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
|
||||
|
||||
@@ -203,7 +203,9 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
kwargs["port"] = ports[0]["port"]
|
||||
elif "platform" in project_options and "board" in project_options:
|
||||
board_hwids = device_helpers.get_board_hwids(
|
||||
kwargs["project_dir"], platform, project_options["board"],
|
||||
kwargs["project_dir"],
|
||||
platform,
|
||||
project_options["board"],
|
||||
)
|
||||
for item in ports:
|
||||
for hwid in board_hwids:
|
||||
|
||||
@@ -22,13 +22,16 @@ class Timestamp(DeviceMonitorFilter):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Timestamp, self).__init__(*args, **kwargs)
|
||||
self._first_text_received = False
|
||||
self._line_started = False
|
||||
|
||||
def rx(self, text):
|
||||
if self._first_text_received and "\n" not in text:
|
||||
if self._line_started and "\n" not in text:
|
||||
return text
|
||||
timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3]
|
||||
if not self._first_text_received:
|
||||
self._first_text_received = True
|
||||
return "%s > %s" % (timestamp, text)
|
||||
if not self._line_started:
|
||||
self._line_started = True
|
||||
text = "%s > %s" % (timestamp, text)
|
||||
if text.endswith("\n"):
|
||||
self._line_started = False
|
||||
return text[:-1].replace("\n", "\n%s > " % timestamp) + "\n"
|
||||
return text.replace("\n", "\n%s > " % timestamp)
|
||||
|
||||
@@ -27,7 +27,13 @@ from twisted.internet import utils # pylint: disable=import-error
|
||||
|
||||
from platformio import __main__, __version__, fs
|
||||
from platformio.commands.home import helpers
|
||||
from platformio.compat import PY2, get_filesystem_encoding, is_bytes, string_types
|
||||
from platformio.compat import (
|
||||
PY2,
|
||||
get_filesystem_encoding,
|
||||
get_locale_encoding,
|
||||
is_bytes,
|
||||
string_types,
|
||||
)
|
||||
|
||||
try:
|
||||
from thread import get_ident as thread_get_ident
|
||||
@@ -95,10 +101,11 @@ class PIOCoreRPC(object):
|
||||
else:
|
||||
args[i] = str(arg)
|
||||
|
||||
options = options or {}
|
||||
to_json = "--json-output" in args
|
||||
|
||||
try:
|
||||
if args and args[0] == "remote":
|
||||
if options.get("force_subprocess"):
|
||||
result = yield PIOCoreRPC._call_subprocess(args, options)
|
||||
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
|
||||
else:
|
||||
@@ -117,7 +124,7 @@ class PIOCoreRPC(object):
|
||||
@staticmethod
|
||||
def _call_inline(args, options):
|
||||
PIOCoreRPC.setup_multithreading_std_streams()
|
||||
cwd = (options or {}).get("cwd") or os.getcwd()
|
||||
cwd = options.get("cwd") or os.getcwd()
|
||||
|
||||
def _thread_task():
|
||||
with fs.cd(cwd):
|
||||
@@ -143,13 +150,15 @@ class PIOCoreRPC(object):
|
||||
@staticmethod
|
||||
def _process_result(result, to_json=False):
|
||||
out, err, code = result
|
||||
if out and is_bytes(out):
|
||||
out = out.decode(get_locale_encoding())
|
||||
if err and is_bytes(err):
|
||||
err = err.decode(get_locale_encoding())
|
||||
text = ("%s\n\n%s" % (out, err)).strip()
|
||||
if code != 0:
|
||||
raise Exception(text)
|
||||
if not to_json:
|
||||
return text
|
||||
if is_bytes(out):
|
||||
out = out.decode()
|
||||
try:
|
||||
return json.loads(out)
|
||||
except ValueError as e:
|
||||
|
||||
@@ -198,7 +198,9 @@ class ProjectRPC(object):
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
|
||||
d = PIOCoreRPC.call(
|
||||
args, options={"cwd": project_dir, "force_subprocess": True}
|
||||
)
|
||||
d.addCallback(self._generate_project_main, project_dir, framework)
|
||||
return d
|
||||
|
||||
@@ -291,7 +293,9 @@ class ProjectRPC(object):
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
|
||||
d = PIOCoreRPC.call(
|
||||
args, options={"cwd": project_dir, "force_subprocess": True}
|
||||
)
|
||||
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
|
||||
return d
|
||||
|
||||
@@ -324,6 +328,8 @@ class ProjectRPC(object):
|
||||
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
|
||||
):
|
||||
args.extend(["--ide", state["storage"]["coreCaller"]])
|
||||
d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
|
||||
d = PIOCoreRPC.call(
|
||||
args, options={"cwd": new_project_dir, "force_subprocess": True}
|
||||
)
|
||||
d.addCallback(lambda _: new_project_dir)
|
||||
return d
|
||||
|
||||
@@ -22,11 +22,7 @@ from tabulate import tabulate
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.lib.helpers import (
|
||||
get_builtin_libs,
|
||||
is_builtin_lib,
|
||||
save_project_libdeps,
|
||||
)
|
||||
from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
@@ -164,15 +160,8 @@ def lib_install( # pylint: disable=too-many-arguments,unused-argument
|
||||
}
|
||||
|
||||
elif storage_dir in storage_libdeps:
|
||||
builtin_lib_storages = None
|
||||
for library in storage_libdeps[storage_dir]:
|
||||
try:
|
||||
lm.install(library, silent=silent, force=force)
|
||||
except UnknownPackageError as e:
|
||||
if builtin_lib_storages is None:
|
||||
builtin_lib_storages = get_builtin_libs()
|
||||
if not silent or not is_builtin_lib(builtin_lib_storages, library):
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
lm.install(library, silent=silent, force=force)
|
||||
|
||||
if save and installed_pkgs:
|
||||
_save_deps(ctx, installed_pkgs)
|
||||
|
||||
@@ -45,10 +45,11 @@ def get_builtin_libs(storage_names=None):
|
||||
return items
|
||||
|
||||
|
||||
def is_builtin_lib(storages, name):
|
||||
for storage in storages or []:
|
||||
if any(lib.get("name") == name for lib in storage["items"]):
|
||||
return True
|
||||
def is_builtin_lib(name, storages=None):
|
||||
for storage in storages or get_builtin_libs():
|
||||
for lib in storage["items"]:
|
||||
if lib.get("name") == name:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
@@ -80,15 +81,22 @@ def save_project_libdeps(project_dir, specs, environments=None, action="add"):
|
||||
if environments and env not in environments:
|
||||
continue
|
||||
config.expand_interpolations = False
|
||||
lib_deps = []
|
||||
candidates = []
|
||||
try:
|
||||
lib_deps = ignore_deps_by_specs(config.get("env:" + env, "lib_deps"), specs)
|
||||
candidates = ignore_deps_by_specs(
|
||||
config.get("env:" + env, "lib_deps"), specs
|
||||
)
|
||||
except InvalidProjectConfError:
|
||||
pass
|
||||
if action == "add":
|
||||
lib_deps.extend(spec.as_dependency() for spec in specs)
|
||||
if lib_deps:
|
||||
config.set("env:" + env, "lib_deps", lib_deps)
|
||||
candidates.extend(spec.as_dependency() for spec in specs)
|
||||
if candidates:
|
||||
result = []
|
||||
for item in candidates:
|
||||
item = item.strip()
|
||||
if item and item not in result:
|
||||
result.append(item)
|
||||
config.set("env:" + env, "lib_deps", result)
|
||||
elif config.has_option("env:" + env, "lib_deps"):
|
||||
config.remove_option("env:" + env, "lib_deps")
|
||||
config.save()
|
||||
|
||||
@@ -34,17 +34,21 @@ def validate_orgname(value):
|
||||
|
||||
@cli.command("create", short_help="Create a new organization")
|
||||
@click.argument(
|
||||
"orgname", callback=lambda _, __, value: validate_orgname(value),
|
||||
"orgname",
|
||||
callback=lambda _, __, value: validate_orgname(value),
|
||||
)
|
||||
@click.option(
|
||||
"--email", callback=lambda _, __, value: validate_email(value) if value else value
|
||||
)
|
||||
@click.option("--displayname",)
|
||||
@click.option(
|
||||
"--displayname",
|
||||
)
|
||||
def org_create(orgname, email, displayname):
|
||||
client = AccountClient()
|
||||
client.create_org(orgname, email, displayname)
|
||||
return click.secho(
|
||||
"The organization `%s` has been successfully created." % orgname, fg="green",
|
||||
"The organization `%s` has been successfully created." % orgname,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@@ -121,12 +125,19 @@ def account_destroy(orgname):
|
||||
abort=True,
|
||||
)
|
||||
client.destroy_org(orgname)
|
||||
return click.secho("Organization `%s` has been destroyed." % orgname, fg="green",)
|
||||
return click.secho(
|
||||
"Organization `%s` has been destroyed." % orgname,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("add", short_help="Add a new owner to organization")
|
||||
@click.argument("orgname",)
|
||||
@click.argument("username",)
|
||||
@click.argument(
|
||||
"orgname",
|
||||
)
|
||||
@click.argument(
|
||||
"username",
|
||||
)
|
||||
def org_add_owner(orgname, username):
|
||||
client = AccountClient()
|
||||
client.add_org_owner(orgname, username)
|
||||
@@ -138,8 +149,12 @@ def org_add_owner(orgname, username):
|
||||
|
||||
|
||||
@cli.command("remove", short_help="Remove an owner from organization")
|
||||
@click.argument("orgname",)
|
||||
@click.argument("username",)
|
||||
@click.argument(
|
||||
"orgname",
|
||||
)
|
||||
@click.argument(
|
||||
"username",
|
||||
)
|
||||
def org_remove_owner(orgname, username):
|
||||
client = AccountClient()
|
||||
client.remove_org_owner(orgname, username)
|
||||
|
||||
@@ -13,13 +13,17 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs
|
||||
from platformio.clients.registry import RegistryClient
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.package.meta import PackageSpec, PackageType
|
||||
from platformio.package.pack import PackagePacker
|
||||
from platformio.package.unpack import FileUnpacker, TARArchiver
|
||||
|
||||
|
||||
def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
|
||||
@@ -77,13 +81,27 @@ def package_pack(package, output):
|
||||
help="Notify by email when package is processed",
|
||||
)
|
||||
def package_publish(package, owner, released_at, private, notify):
|
||||
p = PackagePacker(package)
|
||||
archive_path = p.pack()
|
||||
response = RegistryClient().publish_package(
|
||||
archive_path, owner, released_at, private, notify
|
||||
)
|
||||
os.remove(archive_path)
|
||||
click.secho(response.get("message"), fg="green")
|
||||
assert ensure_python3()
|
||||
|
||||
# publish .tar.gz instantly without repacking
|
||||
if not os.path.isdir(package) and isinstance(
|
||||
FileUnpacker.new_archiver(package), TARArchiver
|
||||
):
|
||||
response = RegistryClient().publish_package(
|
||||
package, owner, released_at, private, notify
|
||||
)
|
||||
click.secho(response.get("message"), fg="green")
|
||||
return
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmp_dir: # pylint: disable=no-member
|
||||
with fs.cd(tmp_dir):
|
||||
p = PackagePacker(package)
|
||||
archive_path = p.pack()
|
||||
response = RegistryClient().publish_package(
|
||||
archive_path, owner, released_at, private, notify
|
||||
)
|
||||
os.remove(archive_path)
|
||||
click.secho(response.get("message"), fg="green")
|
||||
|
||||
|
||||
@cli.command("unpublish", short_help="Remove a pushed package from the registry")
|
||||
@@ -107,7 +125,7 @@ def package_unpublish(package, type, undo): # pylint: disable=redefined-builtin
|
||||
type=type,
|
||||
name=spec.name,
|
||||
owner=spec.owner,
|
||||
version=spec.requirements,
|
||||
version=str(spec.requirements),
|
||||
undo=undo,
|
||||
)
|
||||
click.secho(response.get("message"), fg="green")
|
||||
|
||||
@@ -149,15 +149,19 @@ def project_init(
):
if not silent:
if project_dir == os.getcwd():
click.secho("\nThe current working directory", fg="yellow", nl=False)
click.secho(" %s " % project_dir, fg="cyan", nl=False)
click.secho("will be used for the project.", fg="yellow")
click.secho("\nThe current working directory ", fg="yellow", nl=False)
try:
click.secho(project_dir, fg="cyan", nl=False)
except UnicodeEncodeError:
click.secho(json.dumps(project_dir), fg="cyan", nl=False)
click.secho(" will be used for the project.", fg="yellow")
click.echo("")

click.echo(
"The next files/directories have been created in %s"
% click.style(project_dir, fg="cyan")
)
click.echo("The next files/directories have been created in ", nl=False)
try:
click.secho(project_dir, fg="cyan")
except UnicodeEncodeError:
click.secho(json.dumps(project_dir), fg="cyan")
click.echo(
"%s - Put project header files here" % click.style("include", fg="cyan")
)
@@ -174,8 +178,10 @@ def project_init(
if is_new_project:
init_base_project(project_dir)

if board:
fill_project_envs(
if environment:
update_project_env(project_dir, environment, project_option)
elif board:
update_board_envs(
ctx, project_dir, board, project_option, env_prefix, ide is not None
)

@@ -358,7 +364,7 @@ def init_cvs_ignore(project_dir):
fp.write(".pio\n")


def fill_project_envs(
def update_board_envs(
ctx, project_dir, board_ids, project_option, env_prefix, force_download
):
config = ProjectConfig(
@@ -417,6 +423,26 @@ def _install_dependent_platforms(ctx, platforms):
)


def update_project_env(project_dir, environment, project_option):
if not project_option:
return
config = ProjectConfig(
os.path.join(project_dir, "platformio.ini"), parse_extra=False
)

section = "env:%s" % environment
if not config.has_section(section):
config.add_section(section)

for item in project_option:
if "=" not in item:
continue
_name, _value = item.split("=", 1)
config.set(section, _name.strip(), _value.strip())

config.save()


def get_best_envname(config, board_ids=None):
envname = None
default_envs = config.default_envs()

@@ -23,12 +23,12 @@ from time import sleep

import click

from platformio import exception, fs, proc
from platformio import fs, proc
from platformio.commands.device import helpers as device_helpers
from platformio.commands.device.command import device_monitor as cmd_device_monitor
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.test.command import cli as cmd_test
from platformio.compat import PY2
from platformio.compat import ensure_python3
from platformio.package.manager.core import inject_contrib_pysite
from platformio.project.exception import NotPlatformIOProjectError

@@ -37,13 +37,7 @@ from platformio.project.exception import NotPlatformIOProjectError
@click.option("-a", "--agent", multiple=True)
@click.pass_context
def cli(ctx, agent):
if PY2:
raise exception.UserSideException(
"PlatformIO Remote Development requires Python 3.5 or above. \n"
"Please install the latest Python 3 and reinstall PlatformIO Core using "
"installation script:\n"
"https://docs.platformio.org/page/core/installation.html"
)
assert ensure_python3()
ctx.obj = agent
inject_contrib_pysite(verify_openssl=True)


@@ -45,7 +45,10 @@ class RemoteClientFactory(pb.PBClientFactory, protocol.ReconnectingClientFactory
return d

d = self.login(
credentials.UsernamePassword(auth_token.encode(), get_host_id().encode(),),
credentials.UsernamePassword(
auth_token.encode(),
get_host_id().encode(),
),
client=self.remote_client,
)
d.addCallback(self.remote_client.cb_client_authorization_made)

@@ -63,13 +63,16 @@ def cli():
value, teamname_validate=True
),
)
@click.option("--description",)
@click.option(
"--description",
)
def team_create(orgname_teamname, description):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.create_team(orgname, teamname, description)
return click.secho(
"The team %s has been successfully created." % teamname, fg="green",
"The team %s has been successfully created." % teamname,
fg="green",
)


@@ -123,7 +126,9 @@ def team_list(orgname, json_output):
callback=lambda _, __, value: validate_teamname(value),
help="A new team name",
)
@click.option("--description",)
@click.option(
"--description",
)
def team_update(orgname_teamname, **kwargs):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
@@ -142,7 +147,8 @@ def team_update(orgname_teamname, **kwargs):
new_team.update({key: value for key, value in kwargs.items() if value})
client.update_team(orgname, teamname, new_team)
return click.secho(
"The team %s has been successfully updated." % teamname, fg="green",
"The team %s has been successfully updated." % teamname,
fg="green",
)


@@ -163,7 +169,8 @@ def team_destroy(orgname_teamname):
client = AccountClient()
client.destroy_team(orgname, teamname)
return click.secho(
"The team %s has been successfully destroyed." % teamname, fg="green",
"The team %s has been successfully destroyed." % teamname,
fg="green",
)


@@ -173,7 +180,9 @@ def team_destroy(orgname_teamname):
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.argument("username",)
@click.argument(
"username",
)
def team_add_member(orgname_teamname, username):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()

@@ -177,7 +177,7 @@ def cli( # pylint: disable=redefined-builtin
if without_testing:
return

print_testing_summary(results)
print_testing_summary(results, verbose)

command_failed = any(r.get("succeeded") is False for r in results)
if command_failed:
@@ -222,7 +222,7 @@ def print_processing_footer(result):
)


def print_testing_summary(results):
def print_testing_summary(results, verbose=False):
click.echo()

tabular_data = []
@@ -236,6 +236,8 @@ def print_testing_summary(results):
failed_nums += 1
status_str = click.style("FAILED", fg="red")
elif result.get("succeeded") is None:
if not verbose:
continue
status_str = "IGNORED"
else:
succeeded_nums += 1

@@ -23,9 +23,12 @@ import os
import re
import sys

from platformio.exception import UserSideException

PY2 = sys.version_info[0] == 2
CYGWIN = sys.platform.startswith("cygwin")
WINDOWS = sys.platform.startswith("win")
MACOS = sys.platform.startswith("darwin")


def get_filesystem_encoding():
@@ -58,6 +61,18 @@ def ci_strings_are_equal(a, b):
return a.strip().lower() == b.strip().lower()


def ensure_python3(raise_exception=True):
compatible = sys.version_info >= (3, 6)
if not raise_exception or compatible:
return compatible
raise UserSideException(
"Python 3.6 or later is required for this operation. \n"
"Please install the latest Python 3 and reinstall PlatformIO Core using "
"installation script:\n"
"https://docs.platformio.org/page/core/installation.html"
)


if PY2:
import imp

@@ -84,7 +99,7 @@ if PY2:
if isinstance(obj, unicode):
return obj
return json.dumps(
obj, encoding=get_filesystem_encoding(), ensure_ascii=False, sort_keys=True
obj, encoding=get_filesystem_encoding(), ensure_ascii=False
).encode("utf8")

_magic_check = re.compile("([*?[])")
@@ -132,7 +147,7 @@ else:
def dump_json_to_unicode(obj):
if isinstance(obj, string_types):
return obj
return json.dumps(obj, ensure_ascii=False, sort_keys=True)
return json.dumps(obj)

def glob_recursive(pathname):
return glob.glob(pathname, recursive=True)

@@ -63,7 +63,7 @@ SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
SET(CMAKE_CXX_FLAGS "{{ _normalize_path(to_unix_path(cxx_flags)) }}")
SET(CMAKE_C_FLAGS "{{ _normalize_path(to_unix_path(cc_flags)) }}")

% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
% if cc_stds:

@@ -1,5 +1,5 @@
% import re
% STD_RE = re.compile(r"(\-std=[a-z\+]+\d+)")
% STD_RE = re.compile(r"(\-std=[a-z\+]+\w+)")
% cxx_stds = STD_RE.findall(cxx_flags)
% cxx_std = cxx_stds[-1] if cxx_stds else ""
%

@@ -1,22 +1,12 @@
% import re
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%
%
clang
{{ cxx_path }}

% if cc_stds:
{{"%c"}} -std=c{{ cc_stds[-1] }}
% end
% if cxx_stds:
{{"%cpp"}} -std=c++{{ cxx_stds[-1] }}
% end
{{"%c"}} {{ !cc_flags }}
{{"%cpp"}} {{ !cxx_flags }}

% for include in filter_includes(includes):
-I{{ include }}
-I{{ !include }}
% end

% for define in defines:
-D{{ define }}
-D{{ !define }}
% end

@@ -1,6 +0,0 @@
% for include in filter_includes(includes):
-I{{include}}
% end
% for define in defines:
-D{{!define}}
% end
@@ -1,3 +1,13 @@
% import re
%
% cpp_standards_remap = {
% "0x": "11",
% "1y": "14",
% "1z": "17",
% "2a": "20",
% "2b": "23"
% }

win32 {
HOMEDIR += $$(USERPROFILE)
}
@@ -27,3 +37,9 @@ HEADERS += {{file}}
SOURCES += {{file}}
% end
% end

% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cxx_stds = STD_RE.findall(cxx_flags)
% if cxx_stds:
CONFIG += c++{{ cpp_standards_remap.get(cxx_stds[-1], cxx_stds[-1]) }}
% end

platformio/ide/tpls/sublimetext/.ccls.tpl (new file, 12 lines)
@@ -0,0 +1,12 @@
{{ cxx_path }}

{{"%c"}} {{ !cc_flags }}
{{"%cpp"}} {{ !cxx_flags }}

% for include in filter_includes(includes):
-I{{ !include }}
% end

% for define in defines:
-D{{ !define }}
% end
@@ -5,9 +5,10 @@
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"run"
],
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
"name": "PlatformIO",
"variants":
[
@@ -15,78 +16,73 @@
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"run"
],
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
"name": "Build"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"run",
"--target",
"upload"
],
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
"name": "Upload"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"run",
"--target",
"clean"
],
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
"name": "Clean"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"test"
],
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
"name": "Test"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"run",
"--target",
"program"
],
"name": "Upload using Programmer"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"run",
"--target",
"uploadfs"
],
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
"name": "Upload SPIFFS image"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"update"
],
"file_regex": "^(..[^:\n]*):([0-9]+):?([0-9]+)?:? (.*)$",
"name": "Update platforms and libraries"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"-c", "sublimetext",
"upgrade"
],
"name": "Upgrade PlatformIO Core"

@@ -1,22 +1,12 @@
% import re
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%
%
clang
{{ cxx_path }}

% if cc_stds:
{{"%c"}} -std=c{{ cc_stds[-1] }}
% end
% if cxx_stds:
{{"%cpp"}} -std=c++{{ cxx_stds[-1] }}
% end
{{"%c"}} {{ !cc_flags }}
{{"%cpp"}} {{ !cxx_flags }}

% for include in filter_includes(includes):
-I{{ include }}
-I{{ !include }}
% end

% for define in defines:
-D{{ define }}
-D{{ !define }}
% end

@@ -1,6 +0,0 @@
% for include in filter_includes(includes):
-I"{{include}}"
% end
% for define in defines:
-D{{!define}}
% end
@@ -1,9 +0,0 @@
|
||||
% _defines = " ".join(["-D%s" % d.replace(" ", "\\\\ ") for d in defines])
|
||||
{
|
||||
"execPath": "{{ cxx_path }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(filter_includes(includes)) }}",
|
||||
"gccSuppressWarnings": false
|
||||
}
|
||||
@@ -6,6 +6,14 @@
|
||||
%
|
||||
% systype = platform.system().lower()
|
||||
%
|
||||
% cpp_standards_remap = {
|
||||
% "0x": "11",
|
||||
% "1y": "14",
|
||||
% "1z": "17",
|
||||
% "2a": "20",
|
||||
% "2b": "23"
|
||||
% }
|
||||
%
|
||||
% def _escape(text):
|
||||
% return to_unix_path(text).replace('"', '\\"')
|
||||
% end
|
||||
@@ -68,27 +76,22 @@
|
||||
%
|
||||
% cleaned_includes = filter_includes(includes, ["toolchain"])
|
||||
%
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
|
||||
% cc_stds = STD_RE.findall(cc_flags)
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
% cc_m_flags = split_args(cc_flags)
|
||||
% forced_includes = _find_forced_includes(
|
||||
% filter_args(cc_m_flags, ["-include", "-imacros"]), cleaned_includes)
|
||||
%
|
||||
//
|
||||
// !!! WARNING !!! AUTO-GENERATED FILE!
|
||||
// PLEASE DO NOT MODIFY IT AND USE "platformio.ini":
|
||||
// https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
|
||||
//
|
||||
{
|
||||
"configurations": [
|
||||
{
|
||||
"name": "!!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags"
|
||||
},
|
||||
{
|
||||
% if systype == "windows":
|
||||
"name": "Win32",
|
||||
% elif systype == "darwin":
|
||||
"name": "Mac",
|
||||
"macFrameworkPath": [],
|
||||
% else:
|
||||
"name": "Linux",
|
||||
% end
|
||||
"name": "PlatformIO",
|
||||
"includePath": [
|
||||
% for include in cleaned_includes:
|
||||
"{{ include }}",
|
||||
@@ -110,12 +113,11 @@
|
||||
% end
|
||||
""
|
||||
],
|
||||
"intelliSenseMode": "clang-x64",
|
||||
% if cc_stds:
|
||||
"cStandard": "c{{ cc_stds[-1] }}",
|
||||
% end
|
||||
% if cxx_stds:
|
||||
"cppStandard": "c++{{ cxx_stds[-1] }}",
|
||||
"cppStandard": "c++{{ cpp_standards_remap.get(cxx_stds[-1], cxx_stds[-1]) }}",
|
||||
% end
|
||||
% if forced_includes:
|
||||
"forcedInclude": [
|
||||
|
||||
@@ -19,13 +19,18 @@
|
||||
"request": "launch",
|
||||
"name": "PIO Debug",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"projectEnvName": "{{ env_name }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
"svdPath": "{{ _escape_path(svd_path) }}",
|
||||
% end
|
||||
"preLaunchTask": {
|
||||
"type": "PlatformIO",
|
||||
% if len(config.envs()) > 1:
|
||||
"task": "Pre-Debug ({{ env_name }})"
|
||||
% else:
|
||||
"task": "Pre-Debug"
|
||||
% end
|
||||
},
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
},
|
||||
@@ -34,6 +39,7 @@
|
||||
"request": "launch",
|
||||
"name": "PIO Debug (skip Pre-Debug)",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"projectEnvName": "{{ env_name }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
"svdPath": "{{ _escape_path(svd_path) }}",
|
||||
|
||||
@@ -27,6 +27,7 @@ from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY
|
||||
from platformio.commands.lib.command import lib_update as cmd_lib_update
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.commands.upgrade import get_latest_version
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.package.manager.core import update_core_packages
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
@@ -43,8 +44,25 @@ def on_platformio_start(ctx, force, caller):
|
||||
set_caller(caller)
|
||||
telemetry.on_command()
|
||||
|
||||
if not PlatformioCLI.in_silence():
|
||||
after_upgrade(ctx)
|
||||
if PlatformioCLI.in_silence():
|
||||
return
|
||||
|
||||
after_upgrade(ctx)
|
||||
|
||||
if not ensure_python3(raise_exception=False):
|
||||
click.secho(
|
||||
"""
|
||||
Python 2 and Python 3.5 are not compatible with PlatformIO Core 5.0.
|
||||
Please check the migration guide on how to fix this warning message:
|
||||
""",
|
||||
fg="yellow",
|
||||
)
|
||||
click.secho(
|
||||
"https://docs.platformio.org/en/latest/core/migration.html"
|
||||
"#drop-support-for-python-2-and-3-5",
|
||||
fg="blue",
|
||||
)
|
||||
click.echo("")
|
||||
|
||||
|
||||
def on_platformio_end(ctx, result): # pylint: disable=unused-argument
|
||||
@@ -73,17 +91,20 @@ def on_platformio_exception(e):
|
||||
|
||||
def set_caller(caller=None):
|
||||
caller = caller or getenv("PLATFORMIO_CALLER")
|
||||
if not caller:
|
||||
if getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif is_container():
|
||||
if getenv("C9_UID"):
|
||||
caller = "C9"
|
||||
elif getenv("USER") == "cabox":
|
||||
caller = "CA"
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
caller = "Che"
|
||||
app.set_session_var("caller_id", caller)
|
||||
if caller:
|
||||
return app.set_session_var("caller_id", caller)
|
||||
if getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif getenv("GITPOD_INSTANCE_ID") or getenv("GITPOD_WORKSPACE_URL"):
|
||||
caller = "gitpod"
|
||||
elif is_container():
|
||||
if getenv("C9_UID"):
|
||||
caller = "C9"
|
||||
elif getenv("USER") == "cabox":
|
||||
caller = "CA"
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
caller = "Che"
|
||||
return app.set_session_var("caller_id", caller)
|
||||
|
||||
|
||||
class Upgrader(object):
|
||||
@@ -124,7 +145,9 @@ class Upgrader(object):
|
||||
continue
|
||||
result = result[0]
|
||||
pkg.metadata.spec = PackageSpec(
|
||||
id=result["id"], owner=result["owner"]["username"], name=result["name"],
|
||||
id=result["id"],
|
||||
owner=result["owner"]["username"],
|
||||
name=result["name"],
|
||||
)
|
||||
pkg.dump_meta()
|
||||
return True
|
||||
|
||||
@@ -152,7 +152,10 @@ class PackageManagerInstallMixin(object):
|
||||
return self._install_tmp_pkg(pkg_item)
|
||||
finally:
|
||||
if os.path.isdir(tmp_dir):
|
||||
fs.rmtree(tmp_dir)
|
||||
try:
|
||||
fs.rmtree(tmp_dir)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
def _install_tmp_pkg(self, tmp_pkg):
|
||||
assert isinstance(tmp_pkg, PackageItem)
|
||||
@@ -213,10 +216,10 @@ class PackageManagerInstallMixin(object):
|
||||
# move existing into the new place
|
||||
pkg_dir = os.path.join(self.package_dir, target_dirname)
|
||||
_cleanup_dir(pkg_dir)
|
||||
shutil.move(dst_pkg.path, pkg_dir)
|
||||
shutil.copytree(dst_pkg.path, pkg_dir, symlinks=True)
|
||||
# move new source to the destination location
|
||||
_cleanup_dir(dst_pkg.path)
|
||||
shutil.move(tmp_pkg.path, dst_pkg.path)
|
||||
shutil.copytree(tmp_pkg.path, dst_pkg.path, symlinks=True)
|
||||
return PackageItem(dst_pkg.path)
|
||||
|
||||
if action == "detach-new":
|
||||
@@ -233,10 +236,10 @@ class PackageManagerInstallMixin(object):
|
||||
)
|
||||
pkg_dir = os.path.join(self.package_dir, target_dirname)
|
||||
_cleanup_dir(pkg_dir)
|
||||
shutil.move(tmp_pkg.path, pkg_dir)
|
||||
shutil.copytree(tmp_pkg.path, pkg_dir, symlinks=True)
|
||||
return PackageItem(pkg_dir)
|
||||
|
||||
# otherwise, overwrite existing
|
||||
_cleanup_dir(dst_pkg.path)
|
||||
shutil.move(tmp_pkg.path, dst_pkg.path)
|
||||
shutil.copytree(tmp_pkg.path, dst_pkg.path, symlinks=True)
|
||||
return PackageItem(dst_pkg.path)
|
||||
|
||||
@@ -104,7 +104,7 @@ class PackageManagerUpdateMixin(object):
|
||||
|
||||
outdated = self.outdated(pkg, to_spec)
|
||||
if not silent:
|
||||
self.print_outdated_state(outdated, show_incompatible)
|
||||
self.print_outdated_state(outdated, only_check, show_incompatible)
|
||||
|
||||
if only_check or not outdated.is_outdated(allow_incompatible=False):
|
||||
return pkg
|
||||
@@ -116,24 +116,39 @@ class PackageManagerUpdateMixin(object):
|
||||
self.unlock()
|
||||
|
||||
@staticmethod
|
||||
def print_outdated_state(outdated, show_incompatible=True):
|
||||
def print_outdated_state(outdated, only_check, show_incompatible):
|
||||
if outdated.detached:
|
||||
return click.echo("[%s]" % (click.style("Detached", fg="yellow")))
|
||||
|
||||
if (
|
||||
not outdated.latest
|
||||
or outdated.current == outdated.latest
|
||||
or (not show_incompatible and outdated.current == outdated.wanted)
|
||||
):
|
||||
return click.echo("[%s]" % (click.style("Up-to-date", fg="green")))
|
||||
|
||||
if outdated.wanted and outdated.current == outdated.wanted:
|
||||
return click.echo(
|
||||
"[%s]" % (click.style("Incompatible %s" % outdated.latest, fg="yellow"))
|
||||
)
|
||||
|
||||
if only_check:
|
||||
return click.echo(
|
||||
"[%s]"
|
||||
% (
|
||||
click.style(
|
||||
"Outdated %s" % str(outdated.wanted or outdated.latest),
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return click.echo(
|
||||
"[%s]"
|
||||
% (
|
||||
click.style(
|
||||
"Outdated %s" % str(outdated.wanted or outdated.latest), fg="red"
|
||||
"Updating to %s" % str(outdated.wanted or outdated.latest),
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -51,7 +51,7 @@ class BasePackageManager( # pylint: disable=too-many-public-methods
|
||||
|
||||
def __init__(self, pkg_type, package_dir):
|
||||
self.pkg_type = pkg_type
|
||||
self.package_dir = self.ensure_dir_exists(package_dir)
|
||||
self.package_dir = package_dir
|
||||
self._MEMORY_CACHE = {}
|
||||
|
||||
self._lockfile = None
|
||||
@@ -62,7 +62,9 @@ class BasePackageManager( # pylint: disable=too-many-public-methods
|
||||
def lock(self):
|
||||
if self._lockfile:
|
||||
return
|
||||
self.ensure_dir_exists(os.path.dirname(self.package_dir))
|
||||
self._lockfile = LockFile(self.package_dir)
|
||||
self.ensure_dir_exists(self.package_dir)
|
||||
self._lockfile.acquire()
|
||||
|
||||
def unlock(self):
|
||||
@@ -190,6 +192,9 @@ class BasePackageManager( # pylint: disable=too-many-public-methods
|
||||
return metadata
|
||||
|
||||
def get_installed(self):
|
||||
if not os.path.isdir(self.package_dir):
|
||||
return []
|
||||
|
||||
cache_key = "get_installed"
|
||||
if self.memcache_get(cache_key):
|
||||
return self.memcache_get(cache_key)
|
||||
|
||||
@@ -14,13 +14,16 @@
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import date
|
||||
|
||||
from platformio import __core_packages__, exception, fs, util
|
||||
from platformio.compat import PY2
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio.package.manager.tool import ToolPackageManager
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.proc import get_pythonexe_path
|
||||
|
||||
|
||||
@@ -73,9 +76,17 @@ def inject_contrib_pysite(verify_openssl=False):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from site import addsitedir
|
||||
|
||||
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
|
||||
try:
|
||||
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
|
||||
except UnknownPackageError:
|
||||
pm = ToolPackageManager()
|
||||
contrib_pysite_dir = build_contrib_pysite_package(
|
||||
os.path.join(pm.package_dir, "contrib-pysite")
|
||||
)
|
||||
|
||||
if contrib_pysite_dir in sys.path:
|
||||
return True
|
||||
|
||||
addsitedir(contrib_pysite_dir)
|
||||
sys.path.insert(0, contrib_pysite_dir)
|
||||
|
||||
@@ -86,41 +97,92 @@ def inject_contrib_pysite(verify_openssl=False):
|
||||
# pylint: disable=import-error,unused-import,unused-variable
|
||||
from OpenSSL import SSL
|
||||
except: # pylint: disable=bare-except
|
||||
build_contrib_pysite_deps(get_core_package_dir("contrib-pysite"))
|
||||
build_contrib_pysite_package(contrib_pysite_dir)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def build_contrib_pysite_deps(target_dir):
|
||||
def build_contrib_pysite_package(target_dir, with_metadata=True):
|
||||
systype = util.get_systype()
|
||||
if os.path.isdir(target_dir):
|
||||
fs.rmtree(target_dir)
|
||||
os.makedirs(target_dir)
|
||||
|
||||
# build dependencies
|
||||
args = [
|
||||
get_pythonexe_path(),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"--no-compile",
|
||||
"-t",
|
||||
target_dir,
|
||||
]
|
||||
if "linux" in systype:
|
||||
args.extend(["--no-binary", ":all:"])
|
||||
for dep in get_contrib_pysite_deps():
|
||||
subprocess.check_call(args + [dep])
|
||||
|
||||
# build manifests
|
||||
with open(os.path.join(target_dir, "package.json"), "w") as fp:
|
||||
json.dump(
|
||||
dict(
|
||||
name="contrib-pysite",
|
||||
version="2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
|
||||
system=util.get_systype(),
|
||||
version="2.%d%d.%s"
|
||||
% (
|
||||
sys.version_info.major,
|
||||
sys.version_info.minor,
|
||||
date.today().strftime("%y%m%d"),
|
||||
),
|
||||
system=list(
|
||||
set([systype, "linux_armv6l", "linux_armv7l", "linux_armv8l"])
|
||||
)
|
||||
if systype.startswith("linux_arm")
|
||||
else systype,
|
||||
description="Extra Python package for PlatformIO Core",
|
||||
keywords=["platformio", "platformio-core"],
|
||||
homepage="https://docs.platformio.org/page/core/index.html",
|
||||
repository={
|
||||
"type": "git",
|
||||
"url": "https://github.com/platformio/platformio-core",
|
||||
},
|
||||
),
|
||||
fp,
|
||||
)
|
||||
|
||||
pythonexe = get_pythonexe_path()
|
||||
for dep in get_contrib_pysite_deps():
|
||||
subprocess.check_call(
|
||||
[
|
||||
pythonexe,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
# "--no-cache-dir",
|
||||
"--no-compile",
|
||||
"-t",
|
||||
target_dir,
|
||||
dep,
|
||||
]
|
||||
# generate package metadata
|
||||
if with_metadata:
|
||||
pm = ToolPackageManager()
|
||||
pkg = PackageItem(target_dir)
|
||||
pkg.metadata = pm.build_metadata(
|
||||
target_dir, PackageSpec(owner="platformio", name="contrib-pysite")
|
||||
)
|
||||
return True
|
||||
pkg.dump_meta()
|
||||
|
||||
# remove unused files
|
||||
shutil.rmtree(os.path.join(target_dir, "autobahn", "xbr", "contracts"))
|
||||
for root, dirs, files in os.walk(target_dir):
|
||||
for t in ("_test", "test", "tests"):
|
||||
if t in dirs:
|
||||
shutil.rmtree(os.path.join(root, t))
|
||||
for name in files:
|
||||
if name.endswith((".chm", ".pyc")):
|
||||
os.remove(os.path.join(root, name))
|
||||
|
||||
# apply patches
|
||||
with open(
|
||||
os.path.join(target_dir, "autobahn", "twisted", "__init__.py"), "r+"
|
||||
) as fp:
|
||||
contents = fp.read()
|
||||
contents = contents.replace(
|
||||
"from autobahn.twisted.wamp import ApplicationSession",
|
||||
"# from autobahn.twisted.wamp import ApplicationSession",
|
||||
)
|
||||
fp.seek(0)
|
||||
fp.truncate()
|
||||
fp.write(contents)
|
||||
|
||||
return target_dir
|
||||
|
||||
|
||||
def get_contrib_pysite_deps():
|
||||
@@ -130,7 +192,7 @@ def get_contrib_pysite_deps():
|
||||
twisted_version = "19.10.0" if PY2 else "20.3.0"
|
||||
result = [
|
||||
"twisted == %s" % twisted_version,
|
||||
"autobahn == 20.4.3",
|
||||
"autobahn == %s" % ("19.11.2" if PY2 else "20.7.1"),
|
||||
"json-rpc == 1.13.0",
|
||||
]
|
||||
|
||||
@@ -151,8 +213,8 @@ def get_contrib_pysite_deps():
|
||||
result.append("pypiwin32 == 223")
|
||||
# workaround for twisted wheels
|
||||
twisted_wheel = (
|
||||
"https://download.lfd.uci.edu/pythonlibs/g5apjq5m/Twisted-"
|
||||
"%s-cp%s-cp%sm-win%s.whl"
|
||||
"https://download.lfd.uci.edu/pythonlibs/x2tqcw5k/Twisted-"
|
||||
"%s-cp%s-cp%s-win%s.whl"
|
||||
% (
|
||||
twisted_version,
|
||||
py_version,
|
||||
|
||||
@@ -15,7 +15,10 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
from platformio.package.exception import MissingPackageManifestError
|
||||
from platformio.package.exception import (
|
||||
MissingPackageManifestError,
|
||||
UnknownPackageError,
|
||||
)
|
||||
from platformio.package.manager.base import BasePackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec, PackageType
|
||||
from platformio.project.helpers import get_project_global_lib_dir
|
||||
@@ -43,7 +46,10 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
|
||||
# automatically generate library manifest
|
||||
with open(os.path.join(root_dir, "library.json"), "w") as fp:
|
||||
json.dump(
|
||||
dict(name=spec.name, version=self.generate_rand_version(),),
|
||||
dict(
|
||||
name=spec.name,
|
||||
version=self.generate_rand_version(),
|
||||
),
|
||||
fp,
|
||||
indent=2,
|
||||
)
|
||||
@@ -63,6 +69,33 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
|
||||
return root
|
||||
return path
|
||||
|
||||
def _install( # pylint: disable=too-many-arguments
|
||||
self,
|
||||
spec,
|
||||
search_filters=None,
|
||||
silent=False,
|
||||
skip_dependencies=False,
|
||||
force=False,
|
||||
):
|
||||
try:
|
||||
return super(LibraryPackageManager, self)._install(
|
||||
spec,
|
||||
search_filters=search_filters,
|
||||
silent=silent,
|
||||
skip_dependencies=skip_dependencies,
|
||||
force=force,
|
||||
)
|
||||
except UnknownPackageError as e:
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.commands.lib.helpers import is_builtin_lib
|
||||
|
||||
spec = self.ensure_spec(spec)
|
||||
if is_builtin_lib(spec.name):
|
||||
self.print_message("Already installed, built-in library", fg="yellow")
|
||||
return True
|
||||
|
||||
raise e
|
||||
|
||||
def install_dependencies(self, pkg, silent=False):
|
||||
assert isinstance(pkg, PackageItem)
|
||||
manifest = self.load_manifest(pkg)
|
||||
@@ -79,15 +112,28 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
|
||||
)
|
||||
|
||||
def _install_dependency(self, dependency, silent=False):
|
||||
spec = PackageSpec(
|
||||
name=dependency.get("name"), requirements=dependency.get("version")
|
||||
)
|
||||
if set(["name", "version"]) <= set(dependency.keys()) and any(
|
||||
c in dependency["version"] for c in (":", "/", "@")
|
||||
):
|
||||
spec = PackageSpec("%s=%s" % (dependency["name"], dependency["version"]))
|
||||
else:
|
||||
spec = PackageSpec(
|
||||
owner=dependency.get("owner"),
|
||||
name=dependency.get("name"),
|
||||
requirements=dependency.get("version"),
|
||||
)
|
||||
search_filters = {
|
||||
key: value
|
||||
for key, value in dependency.items()
|
||||
if key in ("authors", "platforms", "frameworks")
|
||||
}
|
||||
return self._install(spec, search_filters=search_filters or None, silent=silent)
|
||||
try:
|
||||
return self._install(
|
||||
spec, search_filters=search_filters or None, silent=silent
|
||||
)
|
||||
except UnknownPackageError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def uninstall_dependencies(self, pkg, silent=False):
|
||||
assert isinstance(pkg, PackageItem)
|
||||
|
||||
@@ -119,12 +119,13 @@ class ManifestParserFactory(object):
|
||||
assert path.endswith("tar.gz")
|
||||
with tarfile.open(path, mode="r:gz") as tf:
|
||||
for t in sorted(ManifestFileType.items().values()):
|
||||
try:
|
||||
return ManifestParserFactory.new(
|
||||
tf.extractfile(t).read().decode(), t
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
for member in (t, "./" + t):
|
||||
try:
|
||||
return ManifestParserFactory.new(
|
||||
tf.extractfile(member).read().decode(), t
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
raise UnknownManifestError("Unknown manifest file type in %s archive" % path)
|
||||
|
||||
@staticmethod
|
||||
@@ -388,7 +389,15 @@ class LibraryJsonManifestParser(BaseManifestParser):
|
||||
raw = [raw]
|
||||
|
||||
if isinstance(raw, dict):
|
||||
return [dict(name=name, version=version) for name, version in raw.items()]
|
||||
result = []
|
||||
for name, version in raw.items():
|
||||
if "/" in name:
|
||||
owner, name = name.split("/", 1)
|
||||
result.append(dict(owner=owner, name=name, version=version))
|
||||
else:
|
||||
result.append(dict(name=name, version=version))
|
||||
return result
|
||||
|
||||
if isinstance(raw, list):
|
||||
for i, dependency in enumerate(raw):
|
||||
if isinstance(dependency, dict):
|
||||
|
||||
@@ -106,6 +106,7 @@ class RepositorySchema(StrictSchema):
|
||||
|
||||
|
||||
class DependencySchema(StrictSchema):
|
||||
owner = fields.Str(validate=validate.Length(min=1, max=100))
|
||||
name = fields.Str(required=True, validate=validate.Length(min=1, max=100))
|
||||
version = fields.Str(validate=validate.Length(min=1, max=100))
|
||||
authors = StrictListField(fields.Str(validate=validate.Length(min=1, max=50)))
|
||||
@@ -140,9 +141,10 @@ class ExampleSchema(StrictSchema):
|
||||
name = fields.Str(
|
||||
required=True,
|
||||
validate=[
|
||||
validate.Length(min=1, max=100),
|
||||
validate.Length(min=1, max=255),
|
||||
validate.Regexp(
|
||||
r"^[a-zA-Z\d\-\_/]+$", error="Only [a-zA-Z0-9-_/] chars are allowed"
|
||||
r"^[a-zA-Z\d\-\_/\. ]+$",
|
||||
error="Only [a-zA-Z0-9-_/. ] chars are allowed",
|
||||
),
|
||||
],
|
||||
)
|
||||
@@ -242,7 +244,7 @@ class ManifestSchema(BaseSchema):
|
||||
raise ValidationError("Could not load SPDX licenses for validation")
|
||||
for item in spdx.get("licenses", []):
|
||||
if item.get("licenseId") == value:
|
||||
return
|
||||
return True
|
||||
raise ValidationError(
|
||||
"Invalid SPDX license identifier. See valid identifiers at "
|
||||
"https://spdx.org/licenses/"
|
||||
@@ -251,9 +253,5 @@ class ManifestSchema(BaseSchema):
|
||||
@staticmethod
|
||||
@memoized(expire="1h")
|
||||
def load_spdx_licenses():
|
||||
version = "3.10"
|
||||
spdx_data_url = (
|
||||
"https://raw.githubusercontent.com/spdx/license-list-data"
|
||||
"/v%s/json/licenses.json" % version
|
||||
)
|
||||
spdx_data_url = "https://dl.bintray.com/platformio/dl-misc/spdx-licenses-3.json"
|
||||
return json.loads(fetch_remote_content(spdx_data_url))
|
||||
|
||||
@@ -209,6 +209,7 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes
|
||||
raw = raw.strip()
|
||||
|
||||
parsers = (
|
||||
self._parse_local_file,
|
||||
self._parse_requirements,
|
||||
self._parse_custom_name,
|
||||
self._parse_id,
|
||||
@@ -227,10 +228,16 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes
|
||||
# the leftover is a package name
|
||||
self.name = raw
|
||||
|
||||
def _parse_requirements(self, raw):
|
||||
if "@" not in raw:
|
||||
@staticmethod
|
||||
def _parse_local_file(raw):
|
||||
if raw.startswith("file://") or not any(c in raw for c in ("/", "\\")):
|
||||
return raw
|
||||
if raw.startswith("file://") and os.path.exists(raw[7:]):
|
||||
if os.path.exists(raw):
|
||||
return "file://%s" % raw
|
||||
return raw
|
||||
|
||||
def _parse_requirements(self, raw):
|
||||
if "@" not in raw or raw.startswith("file://"):
|
||||
return raw
|
||||
tokens = raw.rsplit("@", 1)
|
||||
if any(s in tokens[1] for s in (":", "/")):
|
||||
|
||||
@@ -20,7 +20,8 @@ import tarfile
|
||||
import tempfile
|
||||
|
||||
from platformio import fs
|
||||
from platformio.package.exception import PackageException
|
||||
from platformio.compat import WINDOWS, ensure_python3
|
||||
from platformio.package.exception import PackageException, UserSideException
|
||||
from platformio.package.manifest.parser import ManifestFileType, ManifestParserFactory
|
||||
from platformio.package.manifest.schema import ManifestSchema
|
||||
from platformio.package.meta import PackageItem
|
||||
@@ -28,20 +29,72 @@ from platformio.package.unpack import FileUnpacker
|
||||
|
||||
|
||||
class PackagePacker(object):
|
||||
INCLUDE_DEFAULT = ManifestFileType.items().values()
|
||||
EXCLUDE_DEFAULT = [
|
||||
# PlatformIO internal files
|
||||
PackageItem.METAFILE_NAME,
|
||||
".pio/",
|
||||
"**/.pio/",
|
||||
# Hidden files
|
||||
"._*",
|
||||
"__*",
|
||||
".DS_Store",
|
||||
".vscode",
|
||||
".cache",
|
||||
"**/.cache",
|
||||
# VCS
|
||||
".git/",
|
||||
".hg/",
|
||||
".svn/",
|
||||
".pio/",
|
||||
"**/.pio/",
|
||||
PackageItem.METAFILE_NAME,
|
||||
]
|
||||
INCLUDE_DEFAULT = ManifestFileType.items().values()
|
||||
EXCLUDE_EXTRA = [
|
||||
# Tests
|
||||
"tests?",
|
||||
# Docs
|
||||
"doc",
|
||||
"docs",
|
||||
"mkdocs",
|
||||
"**/*.[pP][dD][fF]",
|
||||
"**/*.[dD][oO][cC]?",
|
||||
"**/*.[pP][pP][tT]?",
|
||||
"**/*.[dD][oO][xX]",
|
||||
"**/*.[hH][tT][mM]?",
|
||||
"**/*.[tT][eE][xX]",
|
||||
"**/*.[jJ][sS]",
|
||||
"**/*.[cC][sS][sS]",
|
||||
# Binary files
|
||||
"**/*.[jJ][pP][gG]",
|
||||
"**/*.[jJ][pP][eE][gG]",
|
||||
"**/*.[pP][nN][gG]",
|
||||
"**/*.[gG][iI][fF]",
|
||||
"**/*.[zZ][iI][pP]",
|
||||
"**/*.[gG][zZ]",
|
||||
"**/*.3[gG][pP]",
|
||||
"**/*.[mM][oO][vV]",
|
||||
"**/*.[mM][pP][34]",
|
||||
"**/*.[pP][sS][dD]",
|
||||
"**/*.[wW][aA][wW]",
|
||||
]
|
||||
EXCLUDE_LIBRARY_EXTRA = [
|
||||
"assets",
|
||||
"extra",
|
||||
"resources",
|
||||
"html",
|
||||
"media",
|
||||
"doxygen",
|
||||
"**/build/",
|
||||
"**/*.flat",
|
||||
"**/*.[jJ][aA][rR]",
|
||||
"**/*.[eE][xX][eE]",
|
||||
"**/*.[bB][iI][nN]",
|
||||
"**/*.[hH][eE][xX]",
|
||||
"**/*.[dD][bB]",
|
||||
"**/*.[dD][aA][tT]",
|
||||
"**/*.[dD][lL][lL]",
|
||||
]
|
||||
|
||||
def __init__(self, package, manifest_uri=None):
|
||||
assert ensure_python3()
|
||||
self.package = package
|
||||
self.manifest_uri = manifest_uri
|
||||
|
||||
@@ -51,7 +104,9 @@ class PackagePacker(object):
|
||||
r"[^\da-zA-Z\-\._\+]+",
|
||||
"",
|
||||
"{name}{system}-{version}.tar.gz".format(
|
||||
name=name, system=("-" + system) if system else "", version=version,
|
||||
name=name,
|
||||
system=("-" + system) if system else "",
|
||||
version=version,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -62,12 +117,17 @@ class PackagePacker(object):
|
||||
|
||||
# if zip/tar.gz -> unpack to tmp dir
|
||||
if not os.path.isdir(src):
|
||||
if WINDOWS:
|
||||
raise UserSideException(
|
||||
"Packaging from an archive does not work on Windows OS. Please "
|
||||
"extract data from `%s` manually and pack a folder instead"
|
||||
% src
|
||||
)
|
||||
with FileUnpacker(src) as fu:
|
||||
assert fu.unpack(tmp_dir, silent=True)
|
||||
src = tmp_dir
|
||||
|
||||
src = self.find_source_root(src)
|
||||
|
||||
manifest = self.load_manifest(src)
|
||||
filename = self.get_archive_name(
|
||||
manifest["name"],
|
||||
@@ -128,16 +188,32 @@ class PackagePacker(object):
|
||||
json.dump(manifest_updated, fp, indent=2, ensure_ascii=False)
|
||||
include = None
|
||||
|
||||
src_filters = self.compute_src_filters(include, exclude)
|
||||
src_filters = self.compute_src_filters(src, include, exclude)
|
||||
with tarfile.open(dst, "w:gz") as tar:
|
||||
for f in fs.match_src_files(src, src_filters, followlinks=False):
|
||||
tar.add(os.path.join(src, f), f)
|
||||
return dst
|
||||
|
||||
def compute_src_filters(self, include, exclude):
|
||||
def compute_src_filters(self, src, include, exclude):
|
||||
exclude_extra = self.EXCLUDE_EXTRA[:]
|
||||
# extend with library extra filters
|
||||
if any(
|
||||
os.path.isfile(os.path.join(src, name))
|
||||
for name in (
|
||||
ManifestFileType.LIBRARY_JSON,
|
||||
ManifestFileType.LIBRARY_PROPERTIES,
|
||||
ManifestFileType.MODULE_JSON,
|
||||
)
|
||||
):
|
||||
exclude_extra.extend(self.EXCLUDE_LIBRARY_EXTRA)
|
||||
|
||||
result = ["+<%s>" % p for p in include or ["*", ".*"]]
|
||||
result += ["-<%s>" % p for p in exclude or []]
|
||||
result += ["-<%s>" % p for p in self.EXCLUDE_DEFAULT]
|
||||
# exclude items declared in manifest
|
||||
result += ["-<%s>" % p for p in exclude or []]
|
||||
# apply extra excludes if no custom "export" field in manifest
|
||||
if not include and not exclude:
|
||||
result += ["-<%s>" % p for p in exclude_extra]
|
||||
# automatically include manifests
|
||||
result += ["+<%s>" % p for p in self.INCLUDE_DEFAULT]
|
||||
return result
|
||||
|
||||
@@ -134,27 +134,28 @@ class FileUnpacker(object):
|
||||
self.path = path
|
||||
self._archiver = None
|
||||
|
||||
def _init_archiver(self):
|
||||
def __enter__(self):
|
||||
self._archiver = self.new_archiver(self.path)
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
if self._archiver:
|
||||
self._archiver.close()
|
||||
|
||||
@staticmethod
|
||||
def new_archiver(path):
|
||||
magic_map = {
|
||||
b"\x1f\x8b\x08": TARArchiver,
|
||||
b"\x42\x5a\x68": TARArchiver,
|
||||
b"\x50\x4b\x03\x04": ZIPArchiver,
|
||||
}
|
||||
magic_len = max(len(k) for k in magic_map)
|
||||
with open(self.path, "rb") as fp:
|
||||
with open(path, "rb") as fp:
|
||||
data = fp.read(magic_len)
|
||||
for magic, archiver in magic_map.items():
|
||||
if data.startswith(magic):
|
||||
return archiver(self.path)
|
||||
raise PackageException("Unknown archive type '%s'" % self.path)
|
||||
|
||||
def __enter__(self):
|
||||
self._archiver = self._init_archiver()
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
if self._archiver:
|
||||
self._archiver.close()
|
||||
return archiver(path)
|
||||
raise PackageException("Unknown archive type '%s'" % path)
|
||||
|
||||
def unpack(
|
||||
self, dest_dir=None, with_progress=True, check_unpacked=True, silent=False
|
||||
|
||||
@@ -12,17 +12,17 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
from os.path import join
|
||||
from subprocess import CalledProcessError, check_call
|
||||
from sys import modules
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from platformio import proc
|
||||
from platformio.package.exception import (
|
||||
PackageException,
|
||||
PlatformioException,
|
||||
UserSideException,
|
||||
)
|
||||
from platformio.proc import exec_command
|
||||
|
||||
try:
|
||||
from urllib.parse import urlparse
|
||||
@@ -51,7 +51,7 @@ class VCSClientFactory(object):
|
||||
if not type_:
|
||||
raise VCSBaseException("VCS: Unknown repository type %s" % remote_url)
|
||||
try:
|
||||
obj = getattr(modules[__name__], "%sClient" % type_.title())(
|
||||
obj = getattr(sys.modules[__name__], "%sClient" % type_.title())(
|
||||
src_dir, remote_url, tag, silent
|
||||
)
|
||||
assert isinstance(obj, VCSClientBase)
|
||||
@@ -86,7 +86,7 @@ class VCSClientBase(object):
|
||||
|
||||
@property
|
||||
def storage_dir(self):
|
||||
return join(self.src_dir, "." + self.command)
|
||||
return os.path.join(self.src_dir, "." + self.command)
|
||||
|
||||
def export(self):
|
||||
raise NotImplementedError
|
||||
@@ -108,17 +108,19 @@ class VCSClientBase(object):
|
||||
args = [self.command] + args
|
||||
if "cwd" not in kwargs:
|
||||
kwargs["cwd"] = self.src_dir
|
||||
if "env" not in kwargs:
|
||||
kwargs["env"] = os.environ
|
||||
try:
|
||||
check_call(args, **kwargs)
|
||||
subprocess.check_call(args, **kwargs)
|
||||
return True
|
||||
except CalledProcessError as e:
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise VCSBaseException("VCS: Could not process command %s" % e.cmd)
|
||||
|
||||
def get_cmd_output(self, args, **kwargs):
|
||||
args = [self.command] + args
|
||||
if "cwd" not in kwargs:
|
||||
kwargs["cwd"] = self.src_dir
|
||||
result = exec_command(args, **kwargs)
|
||||
result = proc.exec_command(args, **kwargs)
|
||||
if result["returncode"] == 0:
|
||||
return result["out"].strip()
|
||||
raise VCSBaseException(
|
||||
@@ -129,6 +131,28 @@ class VCSClientBase(object):
|
||||
class GitClient(VCSClientBase):
|
||||
|
||||
command = "git"
|
||||
_configured = False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.configure()
|
||||
super(GitClient, self).__init__(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def configure(cls):
|
||||
if cls._configured:
|
||||
return True
|
||||
cls._configured = True
|
||||
try:
|
||||
result = proc.exec_command([cls.command, "--exec-path"])
|
||||
if result["returncode"] != 0:
|
||||
return False
|
||||
path = result["out"].strip()
|
||||
if path:
|
||||
proc.append_env_path("PATH", path)
|
||||
return True
|
||||
except subprocess.CalledProcessError:
|
||||
pass
|
||||
return False
|
||||
|
||||
def check_client(self):
|
||||
try:
|
||||
@@ -173,7 +197,7 @@ class GitClient(VCSClientBase):
|
||||
if self.tag:
|
||||
args += ["--branch", self.tag]
|
||||
args += [self.remote_url, self.src_dir]
|
||||
assert self.run_cmd(args)
|
||||
assert self.run_cmd(args, cwd=os.getcwd())
|
||||
if is_commit:
|
||||
assert self.run_cmd(["reset", "--hard", self.tag])
|
||||
return self.run_cmd(
|
||||
|
||||
@@ -94,7 +94,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
|
||||
name = item
|
||||
version = "*"
|
||||
if "@" in item:
|
||||
name, version = item.split("@", 2)
|
||||
name, version = item.split("@", 1)
|
||||
spec = self.pm.ensure_spec(name)
|
||||
options = {"version": version.strip(), "optional": False}
|
||||
if spec.owner:
|
||||
@@ -203,6 +203,9 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
|
||||
elif "nobuild" in targets and opts.get("type") != "framework":
|
||||
self.packages[name]["optional"] = True
|
||||
|
||||
def configure_debug_options(self, initial_debug_options, ide_data):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_lib_storages(self):
|
||||
storages = {}
|
||||
for opts in (self.frameworks or {}).values():
|
||||
|
||||
@@ -20,6 +20,7 @@ from threading import Thread
|
||||
|
||||
from platformio import exception
|
||||
from platformio.compat import (
|
||||
PY2,
|
||||
WINDOWS,
|
||||
get_filesystem_encoding,
|
||||
get_locale_encoding,
|
||||
@@ -125,7 +126,9 @@ def exec_command(*args, **kwargs):
|
||||
result[s[3:]] = kwargs[s].get_buffer()
|
||||
|
||||
for k, v in result.items():
|
||||
if isinstance(result[k], bytes):
|
||||
if PY2 and isinstance(v, unicode): # pylint: disable=undefined-variable
|
||||
result[k] = v.encode()
|
||||
elif not PY2 and isinstance(result[k], bytes):
|
||||
try:
|
||||
result[k] = result[k].decode(
|
||||
get_locale_encoding() or get_filesystem_encoding()
|
||||
@@ -203,3 +206,11 @@ def where_is_program(program, envpath=None):
|
||||
return os.path.join(bin_dir, "%s.exe" % program)
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def append_env_path(name, value):
|
||||
cur_value = os.environ.get(name) or ""
|
||||
if cur_value and value in cur_value.split(os.pathsep):
|
||||
return cur_value
|
||||
os.environ[name] = os.pathsep.join([cur_value, value])
|
||||
return os.environ[name]
|
||||
|
||||
@@ -358,12 +358,6 @@ class ProjectConfigBase(object):
|
||||
click.secho("Warning! %s" % warning, fg="yellow")
|
||||
return True
|
||||
|
||||
def remove_option(self, section, option):
|
||||
return self._parser.remove_option(section, option)
|
||||
|
||||
def remove_section(self, section):
|
||||
return self._parser.remove_section(section)
|
||||
|
||||
|
||||
class ProjectConfigDirsMixin(object):
|
||||
def _get_core_dir(self, exists=False):
|
||||
|
||||
@@ -301,7 +301,11 @@ def on_command():
|
||||
def on_exception(e):
|
||||
skip_conditions = [
|
||||
isinstance(e, cls)
|
||||
for cls in (IOError, exception.ReturnErrorCode, exception.UserSideException,)
|
||||
for cls in (
|
||||
IOError,
|
||||
exception.ReturnErrorCode,
|
||||
exception.UserSideException,
|
||||
)
|
||||
]
|
||||
if any(skip_conditions):
|
||||
return
|
||||
|
||||
@@ -19,7 +19,6 @@ import math
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from functools import wraps
|
||||
from glob import glob
|
||||
@@ -167,12 +166,9 @@ def get_mdns_services():
|
||||
try:
|
||||
import zeroconf
|
||||
except ImportError:
|
||||
from site import addsitedir
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.package.manager.core import inject_contrib_pysite
|
||||
|
||||
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
|
||||
addsitedir(contrib_pysite_dir)
|
||||
sys.path.insert(0, contrib_pysite_dir)
|
||||
inject_contrib_pysite()
|
||||
import zeroconf # pylint: disable=import-outside-toplevel
|
||||
|
||||
class mDNSListener(object):
|
||||
|
||||
@@ -1066,6 +1066,8 @@ def update_project_examples():
|
||||
# Frameworks
|
||||
frameworks = []
|
||||
for framework in API_FRAMEWORKS:
|
||||
if framework["name"] not in framework_examples_md_lines:
|
||||
continue
|
||||
readme_dir = join(project_examples_dir, "frameworks", framework["name"])
|
||||
if not isdir(readme_dir):
|
||||
os.makedirs(readme_dir)
|
||||
|
||||
@@ -26,15 +26,25 @@ import click
|
||||
envvar="PIO_INSTALL_DEVPLATFORMS_IGNORE",
|
||||
help="Ignore names split by comma",
|
||||
)
|
||||
def main(desktop, ignore):
|
||||
@click.option(
|
||||
"--ownernames",
|
||||
envvar="PIO_INSTALL_DEVPLATFORMS_OWNERNAMES",
|
||||
help="Filter dev-platforms by ownernames (split by comma)",
|
||||
)
|
||||
def main(desktop, ignore, ownernames):
|
||||
platforms = json.loads(
|
||||
subprocess.check_output(
|
||||
["platformio", "platform", "search", "--json-output"]
|
||||
).decode()
|
||||
)
|
||||
ignore = [n.strip() for n in (ignore or "").split(",") if n.strip()]
|
||||
ownernames = [n.strip() for n in (ownernames or "").split(",") if n.strip()]
|
||||
for platform in platforms:
|
||||
skip = [not desktop and platform["forDesktop"], platform["name"] in ignore]
|
||||
skip = [
|
||||
not desktop and platform["forDesktop"],
|
||||
platform["name"] in ignore,
|
||||
ownernames and platform["ownername"] not in ownernames,
|
||||
]
|
||||
if any(skip):
|
||||
continue
|
||||
subprocess.check_call(["platformio", "platform", "install", platform["name"]])
|
||||
|
||||
setup.py (2 changes)
@@ -52,7 +52,7 @@ setup(
|
||||
[">=2.7", "!=3.0.*", "!=3.1.*", "!=3.2.*", "!=3.3.*", "!=3.4.*"]
|
||||
),
|
||||
install_requires=install_requires,
|
||||
packages=find_packages() + ["scripts"],
|
||||
packages=find_packages(exclude=["tests.*", "tests"]) + ["scripts"],
|
||||
package_data={
|
||||
"platformio": [
|
||||
"ide/tpls/*/.*.tpl",
|
||||
|
||||
@@ -100,14 +100,21 @@ def test_account_register(
|
||||
|
||||
|
||||
def test_account_login(
|
||||
clirunner, validate_cliresult, isolated_pio_core,
|
||||
clirunner,
|
||||
validate_cliresult,
|
||||
isolated_pio_core,
|
||||
):
|
||||
result = clirunner.invoke(cmd_account, ["login", "-u", username, "-p", password],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
["login", "-u", username, "-p", password],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
def test_account_summary(
|
||||
clirunner, validate_cliresult, isolated_pio_core,
|
||||
clirunner,
|
||||
validate_cliresult,
|
||||
isolated_pio_core,
|
||||
):
|
||||
result = clirunner.invoke(cmd_account, ["show", "--json-output", "--offline"])
|
||||
validate_cliresult(result)
|
||||
@@ -160,13 +167,21 @@ def test_account_summary(
|
||||
|
||||
|
||||
def test_account_token(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(cmd_account, ["token", "--password", password,],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
[
|
||||
"token",
|
||||
"--password",
|
||||
password,
|
||||
],
|
||||
)
|
||||
     validate_cliresult(result)
     assert "Personal Authentication Token:" in result.output
     token = result.output.strip().split(": ")[-1]

     result = clirunner.invoke(
-        cmd_account, ["token", "--password", password, "--json-output"],
+        cmd_account,
+        ["token", "--password", password, "--json-output"],
     )
     validate_cliresult(result)
     json_result = json.loads(result.output.strip())
@@ -177,7 +192,14 @@ def test_account_token(clirunner, validate_cliresult, isolated_pio_core):

     clirunner.invoke(cmd_account, ["logout"])

-    result = clirunner.invoke(cmd_account, ["token", "--password", password,],)
+    result = clirunner.invoke(
+        cmd_account,
+        [
+            "token",
+            "--password",
+            password,
+        ],
+    )
     assert result.exit_code > 0
     assert result.exception
     assert "You are not authorized! Please log in to PIO Account" in str(
@@ -187,7 +209,8 @@ def test_account_token(clirunner, validate_cliresult, isolated_pio_core):
     os.environ["PLATFORMIO_AUTH_TOKEN"] = token

     result = clirunner.invoke(
-        cmd_account, ["token", "--password", password, "--json-output"],
+        cmd_account,
+        ["token", "--password", password, "--json-output"],
     )
     validate_cliresult(result)
     json_result = json.loads(result.output.strip())
@@ -197,7 +220,10 @@ def test_account_token(clirunner, validate_cliresult, isolated_pio_core):

     os.environ.pop("PLATFORMIO_AUTH_TOKEN")

-    result = clirunner.invoke(cmd_account, ["login", "-u", username, "-p", password],)
+    result = clirunner.invoke(
+        cmd_account,
+        ["login", "-u", username, "-p", password],
+    )
     validate_cliresult(result)


@@ -205,7 +231,13 @@ def test_account_change_password(clirunner, validate_cliresult, isolated_pio_cor
     new_password = "Testpassword123"
     result = clirunner.invoke(
         cmd_account,
-        ["password", "--old-password", password, "--new-password", new_password,],
+        [
+            "password",
+            "--old-password",
+            password,
+            "--new-password",
+            new_password,
+        ],
     )
     validate_cliresult(result)
     assert "Password successfully changed!" in result.output
@@ -213,13 +245,20 @@ def test_account_change_password(clirunner, validate_cliresult, isolated_pio_cor
     clirunner.invoke(cmd_account, ["logout"])

     result = clirunner.invoke(
-        cmd_account, ["login", "-u", username, "-p", new_password],
+        cmd_account,
+        ["login", "-u", username, "-p", new_password],
     )
     validate_cliresult(result)

     result = clirunner.invoke(
         cmd_account,
-        ["password", "--old-password", new_password, "--new-password", password,],
+        [
+            "password",
+            "--old-password",
+            new_password,
+            "--new-password",
+            password,
+        ],
     )
     validate_cliresult(result)

@@ -272,14 +311,20 @@ def test_account_update(
     link = link.replace("&amp;", "&")
     session.get(link)

-    result = clirunner.invoke(cmd_account, ["show"],)
+    result = clirunner.invoke(
+        cmd_account,
+        ["show"],
+    )
     assert result.exit_code > 0
     assert result.exception
     assert "You are not authorized! Please log in to PIO Account" in str(
         result.exception
     )

-    result = clirunner.invoke(cmd_account, ["login", "-u", username, "-p", password],)
+    result = clirunner.invoke(
+        cmd_account,
+        ["login", "-u", username, "-p", password],
+    )
     validate_cliresult(result)


@@ -317,7 +362,8 @@ def test_account_update(

 def test_org_create(clirunner, validate_cliresult, isolated_pio_core):
     result = clirunner.invoke(
-        cmd_org, ["create", "--email", email, "--displayname", display_name, orgname],
+        cmd_org,
+        ["create", "--email", email, "--displayname", display_name, orgname],
     )
     validate_cliresult(result)

@@ -405,13 +451,21 @@ def test_org_update(clirunner, validate_cliresult, isolated_pio_core):
 def test_team_create(clirunner, validate_cliresult, isolated_pio_core):
     result = clirunner.invoke(
         cmd_team,
-        ["create", "%s:%s" % (orgname, teamname), "--description", team_description,],
+        [
+            "create",
+            "%s:%s" % (orgname, teamname),
+            "--description",
+            team_description,
+        ],
     )
     validate_cliresult(result)


 def test_team_list(clirunner, validate_cliresult, isolated_pio_core):
-    result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
+    result = clirunner.invoke(
+        cmd_team,
+        ["list", "%s" % orgname, "--json-output"],
+    )
     validate_cliresult(result)
     json_result = json.loads(result.output.strip())
     for item in json_result:
@@ -423,22 +477,30 @@ def test_team_list(clirunner, validate_cliresult, isolated_pio_core):

 def test_team_add_member(clirunner, validate_cliresult, isolated_pio_core):
     result = clirunner.invoke(
-        cmd_team, ["add", "%s:%s" % (orgname, teamname), second_username],
+        cmd_team,
+        ["add", "%s:%s" % (orgname, teamname), second_username],
     )
     validate_cliresult(result)

-    result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
+    result = clirunner.invoke(
+        cmd_team,
+        ["list", "%s" % orgname, "--json-output"],
+    )
     validate_cliresult(result)
     assert second_username in result.output


 def test_team_remove(clirunner, validate_cliresult, isolated_pio_core):
     result = clirunner.invoke(
-        cmd_team, ["remove", "%s:%s" % (orgname, teamname), second_username],
+        cmd_team,
+        ["remove", "%s:%s" % (orgname, teamname), second_username],
     )
     validate_cliresult(result)

-    result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
+    result = clirunner.invoke(
+        cmd_team,
+        ["list", "%s" % orgname, "--json-output"],
+    )
     validate_cliresult(result)
     assert second_username not in result.output

@@ -459,7 +521,10 @@ def test_team_update(clirunner, validate_cliresult, receive_email, isolated_pio_
     )
     validate_cliresult(result)

-    result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
+    result = clirunner.invoke(
+        cmd_team,
+        ["list", "%s" % orgname, "--json-output"],
+    )
     validate_cliresult(result)
     json_result = json.loads(result.output.strip())
     for item in json_result:
@@ -410,6 +410,22 @@ check_tool = pvs-studio
     assert style == 0


+def test_check_pvs_studio_fails_without_license(clirunner, tmpdir):
+    config = DEFAULT_CONFIG + "\ncheck_tool = pvs-studio"
+
+    tmpdir.join("platformio.ini").write(config)
+    tmpdir.mkdir("src").join("main.c").write(TEST_CODE)
+
+    default_result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
+    verbose_result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir), "-v"])
+
+    assert default_result.exit_code != 0
+    assert "failed to perform check" in default_result.output.lower()
+
+    assert verbose_result.exit_code != 0
+    assert "license was not entered" in verbose_result.output.lower()
+
+
 def test_check_embedded_platform_all_tools(clirunner, validate_cliresult, tmpdir):
     config = """
 [env:test]
@@ -446,7 +462,10 @@ int main() {
             result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
             validate_cliresult(result)
             defects = sum(count_defects(result.output))
-            assert defects > 0, "Failed %s with %s" % (framework, tool,)
+            assert defects > 0, "Failed %s with %s" % (
+                framework,
+                tool,
+            )


 def test_check_skip_includes_from_packages(clirunner, validate_cliresult, tmpdir):
@@ -236,7 +236,9 @@ def test_global_lib_update_check(clirunner, validate_cliresult):
     result = clirunner.invoke(cmd_lib, ["-g", "update", "--dry-run", "--json-output"])
     validate_cliresult(result)
     output = json.loads(result.output)
-    assert set(["ESPAsyncTCP", "NeoPixelBus"]) == set(lib["name"] for lib in output)
+    assert set(["Adafruit PN532", "ESPAsyncTCP", "NeoPixelBus"]) == set(
+        lib["name"] for lib in output
+    )


 def test_global_lib_update(clirunner, validate_cliresult):
@@ -256,7 +258,7 @@ def test_global_lib_update(clirunner, validate_cliresult):
     result = clirunner.invoke(cmd_lib, ["-g", "update"])
     validate_cliresult(result)
     assert result.output.count("[Detached]") == 1
-    assert result.output.count("[Up-to-date]") == 15
+    assert result.output.count("[Up-to-date]") == 14

     # update unknown library
     result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"])
@@ -339,14 +341,17 @@ def test_lib_stats(clirunner, validate_cliresult):

     result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])
     validate_cliresult(result)
-    assert set(
-        [
-            "dlweek",
-            "added",
-            "updated",
-            "topkeywords",
-            "dlmonth",
-            "dlday",
-            "lastkeywords",
-        ]
-    ) == set(json.loads(result.output).keys())
+    assert (
+        set(
+            [
+                "dlweek",
+                "added",
+                "updated",
+                "topkeywords",
+                "dlmonth",
+                "dlday",
+                "lastkeywords",
+            ]
+        )
+        == set(json.loads(result.output).keys())
+    )
@@ -60,7 +60,8 @@ def test_install_unknown_from_registry(clirunner):

 def test_install_core_3_dev_platform(clirunner, validate_cliresult, isolated_pio_core):
     result = clirunner.invoke(
-        cli_platform.platform_install, ["atmelavr@1.2.0", "--skip-default-package"],
+        cli_platform.platform_install,
+        ["atmelavr@1.2.0", "--skip-default-package"],
     )
     assert result.exit_code == 0

@@ -34,9 +34,7 @@ def test_local_env():
     if result["returncode"] != 1:
         pytest.fail(str(result))
     # pylint: disable=unsupported-membership-test
-    assert all([s in result["err"] for s in ("PASSED", "IGNORED", "FAILED")]), result[
-        "out"
-    ]
+    assert all([s in result["err"] for s in ("PASSED", "FAILED")]), result["out"]


 def test_multiple_env_build(clirunner, validate_cliresult, tmpdir):
@@ -77,7 +75,8 @@ void loop() {}
     )

     result = clirunner.invoke(
-        cmd_test, ["-d", str(project_dir), "--without-testing", "--without-uploading"],
+        cmd_test,
+        ["-d", str(project_dir), "--without-testing", "--without-uploading"],
     )

     validate_cliresult(result)
@@ -127,7 +126,8 @@ int main() {
     )

     native_result = clirunner.invoke(
-        cmd_test, ["-d", str(project_dir), "-e", "native"],
+        cmd_test,
+        ["-d", str(project_dir), "-e", "native"],
     )

     test_dir.join("unittest_transport.h").write(
@@ -21,6 +21,7 @@ import pytest
 import semantic_version

 from platformio import fs, util
+from platformio.compat import PY2
 from platformio.package.exception import (
     MissingPackageManifestError,
     UnknownPackageError,
@@ -144,6 +145,7 @@ def test_build_metadata(isolated_pio_core, tmpdir_factory):
     assert metadata.version.build[1] == vcs_revision


+@pytest.mark.skipif(PY2, reason="Requires Python 3.5 or higher")
 def test_install_from_url(isolated_pio_core, tmpdir_factory):
     tmp_dir = tmpdir_factory.mktemp("tmp")
     storage_dir = tmpdir_factory.mktemp("storage")
@@ -230,6 +232,41 @@ def test_install_from_registry(isolated_pio_core, tmpdir_factory):
         tm.install("owner/unknown-package-tool", silent=True)


+def test_install_lib_depndencies(isolated_pio_core, tmpdir_factory):
+    tmp_dir = tmpdir_factory.mktemp("tmp")
+
+    src_dir = tmp_dir.join("lib-with-deps").mkdir()
+    root_dir = src_dir.mkdir("root")
+    root_dir.mkdir("src").join("main.cpp").write("#include <stdio.h>")
+    root_dir.join("library.json").write(
+        """
+{
+    "name": "lib-with-deps",
+    "version": "2.0.0",
+    "dependencies": [
+        {
+            "owner": "bblanchon",
+            "name": "ArduinoJson",
+            "version": "^6.16.1"
+        },
+        {
+            "name": "external-repo",
+            "version": "https://github.com/milesburton/Arduino-Temperature-Control-Library.git#4a0ccc1"
+        }
+    ]
+}
+"""
+    )
+
+    lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
+    lm.install("file://%s" % str(src_dir), silent=True)
+    installed = lm.get_installed()
+    assert len(installed) == 4
+    assert set(["external-repo", "ArduinoJson", "lib-with-deps", "OneWire"]) == set(
+        p.metadata.name for p in installed
+    )
+
+
 def test_install_force(isolated_pio_core, tmpdir_factory):
     lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
     # install #64 ArduinoJson
@@ -44,7 +44,7 @@ def test_library_json_parser():
     "dependencies": {
         "deps1": "1.2.0",
         "deps2": "https://github.com/username/package.git",
-        "@owner/deps3": "^2.1.3"
+        "owner/deps3": "^2.1.3"
     },
     "customField": "Custom Value"
 }
@@ -65,9 +65,9 @@ def test_library_json_parser():
             "homepage": "http://old.url.format",
             "build": {"flags": ["-DHELLO"]},
             "dependencies": [
-                {"name": "@owner/deps3", "version": "^2.1.3"},
                 {"name": "deps1", "version": "1.2.0"},
                 {"name": "deps2", "version": "https://github.com/username/package.git"},
+                {"owner": "owner", "name": "deps3", "version": "^2.1.3"},
             ],
             "customField": "Custom Value",
         },
@@ -83,7 +83,7 @@ def test_library_json_parser():
     },
     "dependencies": [
         {"name": "deps1", "version": "1.0.0"},
-        {"name": "@owner/deps2", "version": "1.0.0", "platforms": "*", "frameworks": "arduino, espidf"},
+        {"owner": "owner", "name": "deps2", "version": "1.0.0", "platforms": "*", "frameworks": "arduino, espidf"},
         {"name": "deps3", "version": "1.0.0", "platforms": ["ststm32", "sifive"]}
     ]
 }
@@ -98,13 +98,14 @@ def test_library_json_parser():
             "export": {"exclude": ["audio_samples"]},
             "platforms": ["atmelavr"],
             "dependencies": [
+                {"name": "deps1", "version": "1.0.0"},
                 {
-                    "name": "@owner/deps2",
+                    "owner": "owner",
+                    "name": "deps2",
                     "version": "1.0.0",
                     "platforms": ["*"],
                     "frameworks": ["arduino", "espidf"],
                 },
-                {"name": "deps1", "version": "1.0.0"},
                 {
                     "name": "deps3",
                     "version": "1.0.0",
@@ -115,16 +116,16 @@ def test_library_json_parser():
     )

     raw_data = parser.LibraryJsonManifestParser(
-        '{"dependencies": ["dep1", "dep2", "@owner/dep3"]}'
+        '{"dependencies": ["dep1", "dep2", "owner/dep3@1.2.3"]}'
     ).as_dict()
     raw_data["dependencies"] = sorted(raw_data["dependencies"], key=lambda a: a["name"])
     assert not jsondiff.diff(
         raw_data,
         {
             "dependencies": [
-                {"name": "@owner/dep3"},
                 {"name": "dep1"},
                 {"name": "dep2"},
+                {"name": "owner/dep3@1.2.3"},
             ],
         },
     )
@@ -90,12 +90,13 @@ def test_spec_local_urls(tmpdir_factory):
     assert PackageSpec("file:///tmp/some-lib/") == PackageSpec(
         url="file:///tmp/some-lib/", name="some-lib"
     )
-    assert PackageSpec("file:///tmp/foo.tar.gz@~2.3.0-beta.1") == PackageSpec(
-        url="file:///tmp/foo.tar.gz", name="foo", requirements="~2.3.0-beta.1"
+    # detached package
+    assert PackageSpec("file:///tmp/some-lib@src-67e1043a673d2") == PackageSpec(
+        url="file:///tmp/some-lib@src-67e1043a673d2", name="some-lib"
     )
-    # detached folder with "@" symbol
+    # detached folder without scheme
     pkg_dir = tmpdir_factory.mktemp("storage").join("detached@1.2.3").mkdir()
-    assert PackageSpec("file://%s" % str(pkg_dir)) == PackageSpec(
+    assert PackageSpec(str(pkg_dir)) == PackageSpec(
         name="detached", url="file://%s" % pkg_dir
     )

@@ -19,10 +19,12 @@ import tarfile
 import pytest

 from platformio import fs
-from platformio.compat import WINDOWS
+from platformio.compat import PY2, WINDOWS
 from platformio.package.exception import UnknownManifestError
 from platformio.package.pack import PackagePacker

+pytestmark = pytest.mark.skipif(PY2, reason="Requires Python 3.5 or higher")
+

 def test_base(tmpdir_factory):
     pkg_dir = tmpdir_factory.mktemp("package")
@@ -12,10 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import random
 from glob import glob
-from os import listdir, walk
-from os.path import basename, dirname, getsize, isdir, isfile, join, normpath

 import pytest

@@ -32,24 +31,26 @@ def pytest_generate_tests(metafunc):
     examples_dirs = []

     # repo examples
-    examples_dirs.append(normpath(join(dirname(__file__), "..", "examples")))
+    examples_dirs.append(
+        os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "examples"))
+    )

     # dev/platforms
     for pkg in PlatformPackageManager().get_installed():
         p = PlatformFactory.new(pkg)
-        examples_dir = join(p.get_dir(), "examples")
-        assert isdir(examples_dir)
-        examples_dirs.append(examples_dir)
+        examples_dir = os.path.join(p.get_dir(), "examples")
+        if os.path.isdir(examples_dir):
+            examples_dirs.append(examples_dir)

     project_dirs = []
     for examples_dir in examples_dirs:
         candidates = {}
-        for root, _, files in walk(examples_dir):
+        for root, _, files in os.walk(examples_dir):
             if "platformio.ini" not in files or ".skiptest" in files:
                 continue
             if "zephyr-" in root and PY2:
                 continue
-            group = basename(root)
+            group = os.path.basename(root)
             if "-" in group:
                 group = group.split("-", 1)[0]
             if group not in candidates:
@@ -67,7 +68,7 @@ def test_run(pioproject_dir):
     with fs.cd(pioproject_dir):
         config = ProjectConfig()
         build_dir = config.get_optional_dir("build")
-        if isdir(build_dir):
+        if os.path.isdir(build_dir):
             fs.rmtree(build_dir)

         env_names = config.envs()
@@ -77,18 +78,18 @@ def test_run(pioproject_dir):
             if result["returncode"] != 0:
                 pytest.fail(str(result))

-        assert isdir(build_dir)
+        assert os.path.isdir(build_dir)

         # check .elf file
-        for item in listdir(build_dir):
-            if not isdir(item):
+        for item in os.listdir(build_dir):
+            if not os.path.isdir(item):
                 continue
-            assert isfile(join(build_dir, item, "firmware.elf"))
+            assert os.path.isfile(os.path.join(build_dir, item, "firmware.elf"))
             # check .hex or .bin files
             firmwares = []
             for ext in ("bin", "hex"):
-                firmwares += glob(join(build_dir, item, "firmware*.%s" % ext))
+                firmwares += glob(os.path.join(build_dir, item, "firmware*.%s" % ext))
             if not firmwares:
                 pytest.fail("Missed firmware file")
             for firmware in firmwares:
-                assert getsize(firmware) > 0
+                assert os.path.getsize(firmware) > 0
@@ -91,7 +91,7 @@ def test_check_and_update_libraries(clirunner, isolated_pio_core, validate_clire
     assert "There are the new updates for libraries (ArduinoJson)" in result.output
     assert "Please wait while updating libraries" in result.output
     assert re.search(
-        r"Updating bblanchon/ArduinoJson\s+6\.12\.0\s+\[Outdated [\d\.]+\]",
+        r"Updating bblanchon/ArduinoJson\s+6\.12\.0\s+\[Updating to [\d\.]+\]",
         result.output,
     )

@@ -143,7 +143,9 @@ def test_check_and_update_platforms(clirunner, isolated_pio_core, validate_clire
     validate_cliresult(result)
     assert "There are the new updates for platforms (native)" in result.output
     assert "Please wait while updating platforms" in result.output
-    assert re.search(r"Updating native\s+0.0.0\s+\[Outdated [\d\.]+\]", result.output)
+    assert re.search(
+        r"Updating native\s+0.0.0\s+\[Updating to [\d\.]+\]", result.output
+    )

     # check updated version
     result = clirunner.invoke(cli_pio, ["platform", "list", "--json-output"])