diff --git a/.gitignore b/.gitignore index 07c7b427..ceeaed9a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ *.egg-info *.pyc -.pioenvs +__pycache__ .tox docs/_build dist diff --git a/HISTORY.rst b/HISTORY.rst index c6cd902b..44821cb2 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -1,301 +1,114 @@ Release Notes ============= +.. |PIOCONF| replace:: `"platformio.ini" `__ configuration file +.. |LDF| replace:: `LDF `__ + +.. _release_notes_6: + +PlatformIO Core 6 +----------------- + +**A professional collaborative platform for declarative, safety-critical, and test-driven embedded development.** + +6.0.0 (2022-05-16) +~~~~~~~~~~~~~~~~~~ + +Please check the `Migration guide from 5.x to 6.0 `__. + +* **Package Management** + + - New unified Package Management CLI (``pio pkg``): + + * `pio pkg exec `_ - run command from package tool (`issue #4163 `_) + * `pio pkg install `_ - install the project dependencies or custom packages + * `pio pkg list `__ - list installed packages + * `pio pkg outdated `__ - check for project outdated packages + * `pio pkg search `__ - search for packages + * `pio pkg show `__ - show package information + * `pio pkg uninstall `_ - uninstall the project dependencies or custom packages + * `pio pkg update `__ - update the project dependencies or custom packages + + - Package Manifest + + * Added support for `"scripts" `__ (`issue #485 `_) + * Added support for `multi-licensed `__ packages using SPDX Expressions (`issue #4037 `_) + * Added support for `"dependencies" `__ declared in a "tool" package manifest + + - Added support for `symbolic links `__ allowing pointing the local source folder to the Package Manager (`issue #3348 `_) + - Automatically install dependencies of the local (private) project libraries (`issue #2910 `_) + - Improved detection of a package type from the tarball archive (`issue #3828 `_) + - Ignore files according to the patterns declared in ".gitignore" when using the `pio package pack `__ command (`issue #4188 `_) + - Dropped automatic updates of global libraries and development platforms (`issue #4179 `_) + - Dropped support for the "pythonPackages" field in "platform.json" manifest in favor of `Extra Python Dependencies `__ + - Fixed an issue when manually removed dependencies from the |PIOCONF| were not uninstalled from the storage (`issue #3076 `_) + +* **Unit Testing** + + - Refactored from scratch `Unit Testing `_ solution and its documentation + - New: `Test Hierarchy `_ (`issue #4135 `_) + - New: `Doctest `__ testing framework (`issue #4240 `_) + - New: `GoogleTest `__ testing and mocking framework (`issue #3572 `_) + - New: `Semihosting `__ (`issue #3516 `_) + - New: Hardware `Simulators `__ for Unit Testing (QEMU, Renode, SimAVR, and custom solutions) + - New: ``test`` `build configuration `__ + - Added support for a `custom testing framework `_ + - Added support for a custom `testing command `__ + - Added support for a `custom Unity library `__ (`issue #3980 `_) + - Added support for the ``socket://`` and ``rfc2217://`` protocols using `test_port `__ option (`issue #4229 `_) + - List available project tests with a new `pio test --list-tests `__ option + - Pass extra arguments to the testing program with a new `pio test --program-arg `__ option (`issue #3132 `_) + - Generate reports in JUnit and JSON formats using the `pio test `__ command (`issue #2891 `_) + - Provide more information when the native program crashed on a host (errored with a non-zero return code) (`issue #3429 `_) + - Improved automatic detection of a 
testing serial port (`issue #4076 `_) + - Fixed an issue when command line parameters (``--ignore``, ``--filter``) do not override values defined in the |PIOCONF| (`issue #3845 `_) + - Renamed the "test_build_project_src" project configuration option to the `test_build_src `__ + - Removed the "test_transport" option in favor of the `Custom "unity_config.h" `_ + +* **Static Code Analysis** + + - Updated analysis tools: + + * `Cppcheck `__ v2.7 with various checker improvements and fixed false positives + * `PVS-Studio `__ v7.18 with improved and updated semantic analysis system + + - Added support for the custom `Clang-Tidy `__ configuration file (`issue #4186 `_) + - Added ability to override a tool version using the `platform_packages `__ option (`issue #3798 `_) + - Fixed an issue with improper handling of defects that don't specify a source file (`issue #4237 `_) + +* **Build System** + + - Show project dependency licenses when building in the verbose mode + - Fixed an issue when |LDF| ignores the project `lib_deps `__ while resolving library dependencies (`issue #3598 `_) + - Fixed an issue with calling an extra script located outside a project (`issue #4220 `_) + - Fixed an issue when GCC preprocessor was applied to the ".s" assembly files on case-sensitive OS such as Window OS (`issue #3917 `_) + - Fixed an issue when |LDF| ignores `build_src_flags `__ in the "deep+" mode (`issue #4253 `_) + +* **Integration** + + - Added a new build variable (``COMPILATIONDB_INCLUDE_TOOLCHAIN``) to include toolchain paths in the compilation database (`issue #3735 `_) + - Changed a default path for compilation database `compile_commands.json `__ to the project root + - Enhanced integration for Qt Creator (`issue #3046 `_) + +* **Project Configuration** + + - Extended `Interpolation of Values `__ with ``${this}`` pattern (`issue #3953 `_) + - Embed environment name of the current section in the |PIOCONF| using ``${this.__env__}`` pattern + - Renamed the "src_build_flags" project configuration option to the `build_src_flags `__ + - Renamed the "src_filter" project configuration option to the `build_src_filter `__ + +* **Miscellaneous** + + - Pass extra arguments to the `native `__ program with a new `pio run --program-arg `__ option (`issue #4246 `_) + - Improved PIO Remote setup on credit-card sized computers (Raspberry Pi, BeagleBon, etc) (`issue #3865 `_) + - Finally removed all tracks to the Python 2.7, the Python 3.6 is the minimum supported version. + .. 
_release_notes_5: PlatformIO Core 5 ----------------- -**A professional collaborative platform for embedded development** - -5.2.5 (2022-02-10) -~~~~~~~~~~~~~~~~~~ - -- Improved support for private packages in `PlatformIO Registry `__ -- Improved checking of available Internet connection for IPv6-only workstations (`pull #4151 `_) -- Better detecting of default PlatformIO project directory on Linux OS (`pull #4158 `_) -- Respect disabling debugging server from "platformio.ini" passing an empty value to the `debug_server `__ option -- Fixed a "module 'asyncio' has no attribute 'run'" error when launching PIO Home using Python 3.6 (`issue #4169 `_) - -5.2.4 (2021-12-15) -~~~~~~~~~~~~~~~~~~ - -- Added support for a new ``headers`` field in `library.json `__ (declare a list of header files that can be included in a project source files using ``#include <...>`` directive) -- Improved tab completion support for Bash, ZSH, and Fish shells (`issue #4114 `_) -- Improved support for projects located on a network share (`issue #3417 `_, `issue #3926 `_, `issue #4099 `_) -- Improved PIO Remote setup on credit-card sized computers (Raspberry Pi, BeagleBon, etc) (`issue #3865 `_) -- Upgraded build engine to the SCons 4.3 (`release notes `__) -- Fixed an issue with the CLion project generator when a macro contains a space (`issue #4102 `_) -- Fixed an issue with the NetBeans project generator when the path to PlatformIO contains a space (`issue #4096 `_) -- Fixed an issue when the system environment variable does not override a project configuration option (`issue #4125 `_) -- Fixed an issue when referencing ``*_dir`` option from a custom project configuration environment (`issue #4110 `_) -- Fixed an issue with the CLion template that generated a broken CMake file if user's home directory contained an unescaped backslash (`issue #4071 `_) -- Fixed an issue with wrong detecting Windows architecture when Python 32bit is used (`issue #4134 `_) - -5.2.3 (2021-11-05) -~~~~~~~~~~~~~~~~~~ - -- Automatically synchronize active projects between IDE and `PlatformIO Home `__ -- Added support for custom `device monitor filters `__ (`issue #3924 `_) -- Show human-readable message when infinite recursion is detected while processing `Interpolation of Values `__ (`issue #3883 `_) -- Improved directory interpolation (``${platformio.***_dir}``) in `"platformio.ini" `__ configuration file (`issue #3934 `_) -- Ignore resolving of SCons variables (e.g., ``${(SOURCE.get_abspath())}``) when preprocessing interpolations (`issue #3933 `_) -- Added "inc" as a sign that it's the root of the library (`issue #4093 `_) -- Fixed an issue when the ``$PROJECT_DIR`` variable was not properly replaced in the `debug_server `__ option (`issue #4086 `_) -- Fixed an issue when `PIO Remote `__ device monitor crashes on the first keypress (`issue #3832 `_) -- Fixed "Do not know how to make File target 'debug'" issue when debugging project using `CLion IDE `__ (`pull #4089 `_) -- Fixed "UnicodeEncodeError" when a build output contains non-ASCII characters (`issue #3971 `_) -- Fixed an issue when VSCode's debugger does not the honor default environment (`issue #4098 `_) - -5.2.2 (2021-10-20) -~~~~~~~~~~~~~~~~~~ - -- Override debugging firmware loading mode using ``--load-mode`` option for `pio debug `__ command -- Added support for CLion IDE 2021.3 (`pull #4085 `_) -- Removed debugging "legacy Click" message from CLI (`issue #4083 `_) -- Fixed a "TypeError: sequence item 1: expected str instance, list found" issue when extending configuration 
option in `"platformio.ini" `__ with the multi-line default value (`issue #4082 `_) - -5.2.1 (2021-10-11) -~~~~~~~~~~~~~~~~~~ - -- Clean a build environment and installed library dependencies using a new ``cleanall`` target (`issue #4062 `_) -- Override a default library builder via a new ``builder`` field in a ``build`` group of `library.json `__ manifest (`issue #3957 `_) -- Updated `Cppcheck `__ v2.6 with new checks, increased reliability of advanced addons (MISRA/CERT) and various improvements -- Handle the "test" folder as a part of CLion project (`issue #4005 `_) -- Improved handling of a library root based on "Conan" or "CMake" build systems (`issue #3887 `_) -- Fixed a "KeyError: Invalid board option 'build.cpu'" when using a precompiled library with a board that does not have a CPU field in the manifest (`issue #4056 `_) -- Fixed a "FileExist" error when the `platformio ci `__ command is used in pair with the ``--keep-build-dir`` option (`issue #4011 `_) -- Fixed an issue with draft values of C++ language standards that broke static analysis via Cppcheck (`issue #3944 `_) - -5.2.0 (2021-09-13) -~~~~~~~~~~~~~~~~~~ - -* **PlatformIO Debugging** - - - Boosted `PlatformIO Debugging `__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack - - `Debug unit tests `__ created with `PlatformIO Unit Testing `__ solution (`issue #948 `_) - - Debug native (desktop) applications on a host machine (`issue #980 `_) - - Support debugging on Windows using Windows CMD/CLI (`pio debug `__) (`issue #3793 `_) - - Configure a custom pattern to determine when debugging server is started with a new `debug_server_ready_pattern `__ option - - Fixed an issue with silent hanging when a custom debug server is not found (`issue #3756 `_) - -* **Package Management** - - - Improved a package publishing process: - - * Show package details - * Check for conflicting names in the PlatformIO Trusted Registry - * Check for duplicates and used version - * Validate package manifest - - - Added a new option ``--non-interactive`` to `pio package publish `__ command - -* **Build System** - - - Process "precompiled" and "ldflags" properties of the "library.properties" manifest (`issue #3994 `_) - - Upgraded build engine to the SCons 4.2 (`release notes `__) - - Fixed an issue with broken binary file extension when a custom ``PROGNAME`` contains dot symbols (`issue #3906 `_) - - Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files (`issue #4019 `_) - -* **Static Code Analysis** - - - Updated analysis tools: - - * `Clang-Tidy `__ v12.0.1 with new modules and extended checks list - * `Cppcheck `__ v2.5.0 with improved code analysis and MISRA improvements - * `PVS-Studio `__ v7.14 with support for intermodular analysis, improved MISRA support and new diagnostics - -* **Miscellaneous** - - - Ensure that a serial port is ready before running unit tests on a remote target (`issue #3742 `_) - - Fixed an error "Unknown development platform" when running unit tests on a clean machine (`issue #3901 `_) - - Fixed an issue when "main.cpp" was generated for a new project for 8-bit development platforms (`issue #3872 `_) - -5.1.1 (2021-03-17) -~~~~~~~~~~~~~~~~~~ - -* Fixed a "The command line is too long" issue with a linking process on Windows (`issue #3827 `_) -* Fixed an issue with `device monitor `__ when the "send_on_enter" filter didn't send EOL chars (`issue #3787 `_) -* Fixed an issue with silent mode when unwanted data is printed to stdout (`issue #3837 `_) 
-* Fixed an issue when code inspection fails with "Bad JSON" (`issue #3790 `_) -* Fixed an issue with overriding user-specified debugging configuration information in VSCode (`issue #3824 `_) - -5.1.0 (2021-01-28) -~~~~~~~~~~~~~~~~~~ - -* **PlatformIO Home** - - - Boosted `PlatformIO Home `__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack - - Added a new ``--session-id`` option to `pio home `__ command that helps to keep PlatformIO Home isolated from other instances and protect from 3rd party access (`issue #3397 `_) - -* **Build System** - - - Upgraded build engine to the SCons 4.1 (`release notes `_) - - Refactored a workaround for a maximum command line character limitation (`issue #3792 `_) - - Fixed an issue with Python 3.8+ on Windows when a network drive is used (`issue #3417 `_) - -* **Package Management** - - - New options for `pio system prune `__ command: - - + ``--dry-run`` option to show data that will be removed - + ``--core-packages`` option to remove unnecessary core packages - + ``--platform-packages`` option to remove unnecessary development platform packages (`issue #923 `_) - - - Added new `check_prune_system_threshold `__ setting - - Disabled automatic removal of unnecessary development platform packages (`issue #3708 `_, `issue #3770 `_) - - Fixed an issue when unnecessary packages were removed in ``update --dry-run`` mode (`issue #3809 `_) - - Fixed a "ValueError: Invalid simple block" when uninstalling a package with a custom name and external source (`issue #3816 `_) - -* **Debugging** - - - Configure a custom debug adapter speed using a new `debug_speed `__ option (`issue #3799 `_) - - Handle debugging server's "ready_pattern" in "stderr" output - -* **Miscellaneous** - - - Improved listing of `multicast DNS services `_ - - Fixed a "UnicodeDecodeError: 'utf-8' codec can't decode byte" when using J-Link for firmware uploading on Linux (`issue #3804 `_) - - Fixed an issue with a compiler driver for ".ccls" language server (`issue #3808 `_) - - Fixed an issue when `pio device monitor --eol `__ and "send_on_enter" filter do not work properly (`issue #3787 `_) - -5.0.4 (2020-12-30) -~~~~~~~~~~~~~~~~~~ - -- Added "Core" suffix when showing PlatformIO Core version using ``pio --version`` command -- Improved ".ccls" configuration file for Emacs, Vim, and Sublime Text integrations -- Updated analysis tools: - - * `Cppcheck `__ v2.3 with improved C++ parser and several new MISRA rules - * `PVS-Studio `__ v7.11 with new diagnostics and updated mass suppression mechanism - -- Show a warning message about deprecated support for Python 2 and Python 3.5 -- Do not provide "intelliSenseMode" option when generating configuration for VSCode C/C++ extension -- Fixed a "git-sh-setup: file not found" error when installing project dependencies from Git VCS (`issue #3740 `_) -- Fixed an issue with package publishing on Windows when Unix permissions are not preserved (`issue #3776 `_) - -5.0.3 (2020-11-12) -~~~~~~~~~~~~~~~~~~ - -- Added an error selector for `Sublime Text `__ build runner (`issue #3733 `_) -- Generate a working "projectEnvName" for PlatformIO IDE's debugger for VSCode -- Force VSCode's intelliSenseMode to "gcc-x64" when GCC toolchain is used -- Print ignored test suites and environments in the test summary report only in verbose mode (`issue #3726 `_) -- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 `_) -- Fixed an issue when `pio package pack `__ ignores some folders 
(`issue #3730 `_) - -5.0.2 (2020-10-30) -~~~~~~~~~~~~~~~~~~ - -- Initialize a new project or update the existing passing working environment name and its options (`issue #3686 `_) -- Automatically build PlatformIO Core extra Python dependencies on a host machine if they are missed in the registry (`issue #3700 `_) -- Improved "core.call" RPC for PlatformIO Home (`issue #3671 `_) -- Fixed a "PermissionError: [WinError 5]" on Windows when an external repository is used with `lib_deps `__ option (`issue #3664 `_) -- Fixed a "KeyError: 'versions'" when dependency does not exist in the registry (`issue #3666 `_) -- Fixed an issue with GCC linker when "native" dev-platform is used in pair with library dependencies (`issue #3669 `_) -- Fixed an "AssertionError: ensure_dir_exists" when checking library updates from simultaneous subprocesses (`issue #3677 `_) -- Fixed an issue when `pio package publish `__ command removes original archive after submitting to the registry (`issue #3716 `_) -- Fixed an issue when multiple `pio lib install `__ command with the same local library results in duplicates in ``lib_deps`` (`issue #3715 `_) -- Fixed an issue with a "wrong" timestamp in device monitor output using `"time" filter `__ (`issue #3712 `_) - -5.0.1 (2020-09-10) -~~~~~~~~~~~~~~~~~~ - -- Added support for "owner" requirement when declaring ``dependencies`` using `library.json `__ -- Fixed an issue when using a custom git/ssh package with `platform_packages `__ option (`issue #3624 `_) -- Fixed an issue with "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using `Remote Development `__ on RaspberryPi device (`issue #3652 `_) -- Fixed an issue when `pio package unpublish `__ command crashes (`issue #3660 `_) -- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 `_) -- Fixed an issue with incorrect value for C++ language standard in IDE projects when an in-progress language standard is used (`issue #3653 `_) -- Fixed an issue with "Invalid simple block (semantic_version)" from library dependency that refs to an external source (repository, ZIP/Tar archives) (`issue #3658 `_) -- Fixed an issue when can not remove update or remove external dev-platform using PlatformIO Home (`issue #3663 `_) - -5.0.0 (2020-09-03) -~~~~~~~~~~~~~~~~~~ - -Please check `Migration guide from 4.x to 5.0 `__. 
- -* Integration with the new **PlatformIO Trusted Registry** - - - Enterprise-grade package storage with high availability (multi replicas) - - Secure, fast, and reliable global content delivery network (CDN) - - Universal support for all packages: - - * Libraries - * Development platforms - * Toolchains - - - Built-in fine-grained access control (role-based, teams, organizations) - - New CLI commands: - - * `pio package `__ – manage packages in the registry - * `pio access `__ – manage package access for users, teams, and maintainers - -* Integration with the new **Account Management System** - - - `Manage organizations `__ - - `Manage teams and team memberships `__ - -* New **Package Management System** - - - Integrated PlatformIO Core with the new PlatformIO Registry - - Support for owner-based dependency declaration (resolves name conflicts) (`issue #1824 `_) - - Automatically save dependencies to `"platformio.ini" `__ when installing using PlatformIO CLI (`issue #2964 `_) - - Follow SemVer complaint version constraints when checking library updates `issue #1281 `_) - - Dropped support for "packageRepositories" section in "platform.json" manifest (please publish packages directly to the registry) - -* **Build System** - - - Upgraded build engine to the `SCons 4.0 - a next-generation software construction tool `__ - - * `Configuration files are Python scripts `__ – use the power of a real programming language to solve build problems - * Built-in reliable and automatic dependency analysis - * Improved support for parallel builds - * Ability to `share built files in a cache `__ to speed up multiple builds - - - New `Custom Targets `__ - - * Pre/Post processing based on dependent sources (another target, source file, etc.) - * Command launcher with own arguments - * Launch command with custom options declared in `"platformio.ini" `__ - * Python callback as a target (use the power of Python interpreter and PlatformIO Build API) - * List available project targets (including dev-platform specific and custom targets) with a new `pio run --list-targets `__ command (`issue #3544 `_) - - - Enable "cyclic reference" for GCC linker only for the embedded dev-platforms (`issue #3570 `_) - - Automatically enable LDF dependency `chain+ mode (evaluates C/C++ Preprocessor conditional syntax) `__ for Arduino library when "library.property" has "depends" field (`issue #3607 `_) - - Fixed an issue with improper processing of source files added via multiple Build Middlewares (`issue #3531 `_) - - Fixed an issue with the ``clean`` target on Windows when project and build directories are located on different logical drives (`issue #3542 `_) - -* **Project Management** - - - Added support for "globstar/`**`" (recursive) pattern for the different commands and configuration options (`pio ci `__, `src_filter `__, `check_patterns `__, `library.json > srcFilter `__). 
Python 3.5+ is required - - Added a new ``-e, --environment`` option to `pio project init `__ command that helps to update a PlatformIO project using the existing environment - - Dump build system data intended for IDE extensions/plugins using a new `pio project data `__ command - - Do not generate ".travis.yml" for a new project, let the user have a choice - -* **Unit Testing** - - - Updated PIO Unit Testing support for Mbed framework and added compatibility with Mbed OS 6 - - Fixed an issue when running multiple test environments (`issue #3523 `_) - - Fixed an issue when Unit Testing engine fails with a custom project configuration file (`issue #3583 `_) - -* **Static Code Analysis** - - - Updated analysis tools: - - * `Cppcheck `__ v2.1 with a new "soundy" analysis option and improved code parser - * `PVS-Studio `__ v7.09 with a new file list analysis mode and an extended list of analysis diagnostics - - - Added Cppcheck package for ARM-based single-board computers (`issue #3559 `_) - - Fixed an issue with PIO Check when a defect with a multiline error message is not reported in verbose mode (`issue #3631 `_) - -* **Miscellaneous** - - - Display system-wide information using a new `pio system info `__ command (`issue #3521 `_) - - Remove unused data using a new `pio system prune `__ command (`issue #3522 `_) - - Show ignored project environments only in the verbose mode (`issue #3641 `_) - - Do not escape compiler arguments in VSCode template on Windows - - Drop support for Python 2 and 3.5 +See `PlatformIO Core 5.0 history `__. .. _release_notes_4: diff --git a/Makefile b/Makefile index 5ba6f788..8b280334 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,7 @@ format: black ./tests test: - py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py + py.test --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py before-commit: isort format lint diff --git a/README.rst b/README.rst index 061c0142..ff10f8f2 100644 --- a/README.rst +++ b/README.rst @@ -1,8 +1,12 @@ +.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg + :target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md + :alt: SWUbanner + PlatformIO Core =============== .. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg - :target: https://docs.platformio.org/page/core/index.html + :target: https://docs.platformio.org/en/latest/core/index.html :alt: CI Build for PlatformIO Core .. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg :target: https://github.com/platformio/platformio-examples @@ -36,7 +40,7 @@ PlatformIO Core .. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png :target: https://platformio.org?utm_source=github&utm_medium=core -`PlatformIO `_ is a professional collaborative platform for embedded development +`PlatformIO `_ is a professional collaborative platform for embedded development. **A place where Developers and Teams have true Freedom! No more vendor lock-in!** @@ -49,24 +53,24 @@ PlatformIO Core Get Started ----------- -* `What is PlatformIO? `_ +* `What is PlatformIO? 
`_ * `PlatformIO IDE `_ -* `PlatformIO Core (CLI) `_ +* `PlatformIO Core (CLI) `_ * `Project Examples `__ Solutions --------- -* `Library Management `_ -* `Desktop IDEs Integration `_ -* `Continuous Integration `_ +* `Library Management `_ +* `Desktop IDEs Integration `_ +* `Continuous Integration `_ **Advanced** -* `Debugging `_ -* `Unit Testing `_ -* `Static Code Analysis `_ -* `Remote Development `_ +* `Debugging `_ +* `Unit Testing `_ +* `Static Code Analysis `_ +* `Remote Development `_ Registry -------- @@ -86,7 +90,7 @@ Telemetry / Privacy Policy Share minimal diagnostics and usage information to help us make PlatformIO better. It is enabled by default. For more information see: -* `Telemetry Setting `_ +* `Telemetry Setting `_ License ------- diff --git a/docs b/docs index bbf4d275..6bbb8134 160000 --- a/docs +++ b/docs @@ -1 +1 @@ -Subproject commit bbf4d27508f4fe600dfb5706641bdccf6b2a762e +Subproject commit 6bbb813494d4a28bf6345a478b41c8f92e0b2533 diff --git a/examples b/examples index dcafbd19..8464bbb5 160000 --- a/examples +++ b/examples @@ -1 +1 @@ -Subproject commit dcafbd192ee19fdb310136fa62335a3ce13ec517 +Subproject commit 8464bbb5d96022ade33f92ca829c6401fb067d6a diff --git a/platformio/__init__.py b/platformio/__init__.py index 0c9e9bd5..f59130aa 100644 --- a/platformio/__init__.py +++ b/platformio/__init__.py @@ -14,7 +14,7 @@ import sys -VERSION = (5, 2, 5) +VERSION = (6, 0, 0) __version__ = ".".join([str(s) for s in VERSION]) __title__ = "platformio" @@ -38,9 +38,9 @@ __license__ = "Apache Software License" __copyright__ = "Copyright 2014-present PlatformIO Labs" __accounts_api__ = "https://api.accounts.platformio.org" -__registry_api__ = [ - "https://api.registry.platformio.org", - "https://api.registry.ns1.platformio.org", +__registry_mirror_hosts__ = [ + "registry.platformio.org", + "registry.nm1.platformio.org", ] __pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413" @@ -49,16 +49,14 @@ __default_requests_timeout__ = (10, None) # (connect, read) __core_packages__ = { "contrib-piohome": "~3.4.1", "contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor), - "tool-unity": "~1.20500.0", "tool-scons": "~4.40300.0", - "tool-cppcheck": "~1.260.0", + "tool-cppcheck": "~1.270.0", "tool-clangtidy": "~1.120001.0", - "tool-pvs-studio": "~7.14.0", + "tool-pvs-studio": "~7.18.0", } __check_internet_hosts__ = [ "185.199.110.153", # Github.com "88.198.170.159", # platformio.org "github.com", - "platformio.org", -] +] + __registry_mirror_hosts__ diff --git a/platformio/__main__.py b/platformio/__main__.py index 38c2ce00..07816eda 100644 --- a/platformio/__main__.py +++ b/platformio/__main__.py @@ -12,15 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
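+# NOTE: Core 6 requires Python 3.6+, so "maintenance" is imported at module
+# level (no more deferred imports kept around for Python 2) and Click's
+# result_callback() is used directly, without the try/except fallback to the
+# older resultcallback() API.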
-# pylint: disable=import-outside-toplevel - import os import sys from traceback import format_exc import click -from platformio import __version__, exception +from platformio import __version__, exception, maintenance from platformio.commands import PlatformioCLI from platformio.compat import IS_CYGWIN, ensure_python3 @@ -29,7 +27,7 @@ from platformio.compat import IS_CYGWIN, ensure_python3 cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"]) ) @click.version_option(__version__, prog_name="PlatformIO Core") -@click.option("--force", "-f", is_flag=True, help="DEPRECATE") +@click.option("--force", "-f", is_flag=True, help="DEPRECATED") @click.option("--caller", "-c", help="Caller ID (service)") @click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters") @click.pass_context @@ -55,29 +53,12 @@ def cli(ctx, force, caller, no_ansi): except: # pylint: disable=bare-except pass - from platformio import maintenance - maintenance.on_platformio_start(ctx, force, caller) -try: - - @cli.result_callback() - @click.pass_context - def process_result(ctx, result, *_, **__): - _process_result(ctx, result) - -except (AttributeError, TypeError): # legacy support for CLick > 8.0.1 - - @cli.resultcallback() - @click.pass_context - def process_result(ctx, result, *_, **__): - _process_result(ctx, result) - - -def _process_result(ctx, result): - from platformio import maintenance - +@cli.result_callback() +@click.pass_context +def process_result(ctx, result, *_, **__): maintenance.on_platformio_end(ctx, result) @@ -124,10 +105,7 @@ def main(argv=None): exit_code = int(e.code) except Exception as e: # pylint: disable=broad-except if not isinstance(e, exception.ReturnErrorCode): - if sys.version_info.major != 2: - from platformio import maintenance - - maintenance.on_platformio_exception(e) + maintenance.on_platformio_exception(e) error_str = "Error: " if isinstance(e, exception.PlatformioException): error_str += str(e) diff --git a/platformio/app.py b/platformio/app.py index 6c152c17..26b4c80b 100644 --- a/platformio/app.py +++ b/platformio/app.py @@ -35,24 +35,8 @@ def projects_dir_validate(projects_dir): DEFAULT_SETTINGS = { - "auto_update_libraries": { - "description": "Automatically update libraries (Yes/No)", - "value": False, - }, - "auto_update_platforms": { - "description": "Automatically update platforms (Yes/No)", - "value": False, - }, - "check_libraries_interval": { - "description": "Check for the library updates interval (days)", - "value": 7, - }, "check_platformio_interval": { - "description": "Check for the new PlatformIO interval (days)", - "value": 3, - }, - "check_platforms_interval": { - "description": "Check for the platform updates interval (days)", + "description": "Check for the new PlatformIO Core interval (days)", "value": 7, }, "check_prune_system_threshold": { diff --git a/platformio/builder/main.py b/platformio/builder/main.py index 6384b08d..616f4a0a 100644 --- a/platformio/builder/main.py +++ b/platformio/builder/main.py @@ -44,25 +44,28 @@ clivars.AddVariables( ("PIOENV",), ("PIOTEST_RUNNING_NAME",), ("UPLOAD_PORT",), + ("PROGRAM_ARGS",), ) DEFAULT_ENV_OPTIONS = dict( tools=[ "ar", - "as", "cc", "c++", "link", + "pioasm", "platformio", - "piotarget", - "pioplatform", "pioproject", + "pioplatform", + "piotest", + "piotarget", "piomaxlen", "piolib", "pioupload", - "piomisc", - "pioide", "piosize", + "pioino", + "piomisc", + "piointegration", ], toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")], variables=clivars, @@ 
-72,7 +75,7 @@ DEFAULT_ENV_OPTIONS = dict( BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"), BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"), BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"), - COMPILATIONDB_PATH=os.path.join("$BUILD_DIR", "compile_commands.json"), + COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"), LIBPATH=["$BUILD_DIR"], PROGNAME="program", PROG_PATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"), @@ -223,12 +226,13 @@ if "envdump" in COMMAND_LINE_TARGETS: env.Exit(0) if set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS): + projenv = None try: Import("projenv") except: # pylint: disable=bare-except projenv = env - data = projenv.DumpIDEData(env) - # dump to file for the further reading by project.helpers.load_project_ide_data + data = projenv.DumpIntegrationData(env) + # dump to file for the further reading by project.helpers.load_build_metadata with open( projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")), mode="w", diff --git a/platformio/builder/tools/pioasm.py b/platformio/builder/tools/pioasm.py new file mode 100644 index 00000000..6af96bcd --- /dev/null +++ b/platformio/builder/tools/pioasm.py @@ -0,0 +1,31 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import SCons.Tool.asm # pylint: disable=import-error + +# +# Resolve https://github.com/platformio/platformio-core/issues/3917 +# Avoid forcing .S to bare assembly on Windows OS +# + +if ".S" in SCons.Tool.asm.ASSuffixes: + SCons.Tool.asm.ASSuffixes.remove(".S") +if ".S" not in SCons.Tool.asm.ASPPSuffixes: + SCons.Tool.asm.ASPPSuffixes.append(".S") + + +generate = SCons.Tool.asm.generate +exists = SCons.Tool.asm.exists diff --git a/platformio/builder/tools/pioino.py b/platformio/builder/tools/pioino.py new file mode 100644 index 00000000..7a95da9b --- /dev/null +++ b/platformio/builder/tools/pioino.py @@ -0,0 +1,254 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import atexit +import glob +import io +import os +import re +import tempfile + +import click + +from platformio.compat import get_filesystem_encoding, get_locale_encoding + + +class InoToCPPConverter(object): + + PROTOTYPE_RE = re.compile( + r"""^( + (?:template\<.*\>\s*)? 
# template + ([a-z_\d\&]+\*?\s+){1,2} # return type + ([a-z_\d]+\s*) # name of prototype + \([a-z_,\.\*\&\[\]\s\d]*\) # arguments + )\s*(\{|;) # must end with `{` or `;` + """, + re.X | re.M | re.I, + ) + DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I) + PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)" + + def __init__(self, env): + self.env = env + self._main_ino = None + self._safe_encoding = None + + def read_safe_contents(self, path): + error_reported = False + for encoding in ( + "utf-8", + None, + get_filesystem_encoding(), + get_locale_encoding(), + "latin-1", + ): + try: + with io.open(path, encoding=encoding) as fp: + contents = fp.read() + self._safe_encoding = encoding + return contents + except UnicodeDecodeError: + if not error_reported: + error_reported = True + click.secho( + "Unicode decode error has occurred, please remove invalid " + "(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8" + % path, + fg="yellow", + err=True, + ) + return "" + + def write_safe_contents(self, path, contents): + with io.open( + path, "w", encoding=self._safe_encoding, errors="backslashreplace" + ) as fp: + return fp.write(contents) + + def is_main_node(self, contents): + return self.DETECTMAIN_RE.search(contents) + + def convert(self, nodes): + contents = self.merge(nodes) + if not contents: + return None + return self.process(contents) + + def merge(self, nodes): + assert nodes + lines = [] + for node in nodes: + contents = self.read_safe_contents(node.get_path()) + _lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents] + if self.is_main_node(contents): + lines = _lines + lines + self._main_ino = node.get_path() + else: + lines.extend(_lines) + + if not self._main_ino: + self._main_ino = nodes[0].get_path() + + return "\n".join(["#include "] + lines) if lines else None + + def process(self, contents): + out_file = self._main_ino + ".cpp" + assert self._gcc_preprocess(contents, out_file) + contents = self.read_safe_contents(out_file) + contents = self._join_multiline_strings(contents) + self.write_safe_contents(out_file, self.append_prototypes(contents)) + return out_file + + def _gcc_preprocess(self, contents, out_file): + tmp_path = tempfile.mkstemp()[1] + self.write_safe_contents(tmp_path, contents) + self.env.Execute( + self.env.VerboseAction( + '$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format( + out_file, tmp_path + ), + "Converting " + os.path.basename(out_file[:-4]), + ) + ) + atexit.register(_delete_file, tmp_path) + return os.path.isfile(out_file) + + def _join_multiline_strings(self, contents): + if "\\\n" not in contents: + return contents + newlines = [] + linenum = 0 + stropen = False + for line in contents.split("\n"): + _linenum = self._parse_preproc_line_num(line) + if _linenum is not None: + linenum = _linenum + else: + linenum += 1 + + if line.endswith("\\"): + if line.startswith('"'): + stropen = True + newlines.append(line[:-1]) + continue + if stropen: + newlines[len(newlines) - 1] += line[:-1] + continue + elif stropen and line.endswith(('",', '";')): + newlines[len(newlines) - 1] += line + stropen = False + newlines.append( + '#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/")) + ) + continue + + newlines.append(line) + + return "\n".join(newlines) + + @staticmethod + def _parse_preproc_line_num(line): + if not line.startswith("#"): + return None + tokens = line.split(" ", 3) + if len(tokens) > 2 and tokens[1].isdigit(): + return int(tokens[1]) + return None + + def _parse_prototypes(self, contents): + 
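+        # Scan the GCC-preprocessed sketch for function definitions and
+        # declarations using PROTOTYPE_RE: group 2 captures the return type,
+        # group 3 the function name, and group 4 the trailing "{" or ";".
+        # Matches whose tokens are control keywords ("if", "else", "while")
+        # are regex false positives and are dropped. append_prototypes()
+        # later turns the remaining matches into forward declarations, e.g.
+        # a sketch that defines "void blink(int pin) { ... }" after loop()
+        # gets "void blink(int pin);" injected near the top of the
+        # generated .cpp file, before the first definition.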
prototypes = [] + reserved_keywords = set(["if", "else", "while"]) + for match in self.PROTOTYPE_RE.finditer(contents): + if ( + set([match.group(2).strip(), match.group(3).strip()]) + & reserved_keywords + ): + continue + prototypes.append(match) + return prototypes + + def _get_total_lines(self, contents): + total = 0 + if contents.endswith("\n"): + contents = contents[:-1] + for line in contents.split("\n")[::-1]: + linenum = self._parse_preproc_line_num(line) + if linenum is not None: + return total + linenum + total += 1 + return total + + def append_prototypes(self, contents): + prototypes = self._parse_prototypes(contents) or [] + + # skip already declared prototypes + declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";") + prototypes = [m for m in prototypes if m.group(1).strip() not in declared] + + if not prototypes: + return contents + + prototype_names = set(m.group(3).strip() for m in prototypes) + split_pos = prototypes[0].start() + match_ptrs = re.search( + self.PROTOPTRS_TPLRE % ("|".join(prototype_names)), + contents[:split_pos], + re.M, + ) + if match_ptrs: + split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1 + + result = [] + result.append(contents[:split_pos].strip()) + result.append("%s;" % ";\n".join([m.group(1) for m in prototypes])) + result.append( + '#line %d "%s"' + % ( + self._get_total_lines(contents[:split_pos]), + self._main_ino.replace("\\", "/"), + ) + ) + result.append(contents[split_pos:].strip()) + return "\n".join(result) + + +def ConvertInoToCpp(env): + src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR")) + ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob( + os.path.join(src_dir, "*.pde") + ) + if not ino_nodes: + return + c = InoToCPPConverter(env) + out_file = c.convert(ino_nodes) + + atexit.register(_delete_file, out_file) + + +def _delete_file(path): + try: + if os.path.isfile(path): + os.remove(path) + except: # pylint: disable=bare-except + pass + + +def generate(env): + env.AddMethod(ConvertInoToCpp) + + +def exists(_): + return True diff --git a/platformio/builder/tools/pioide.py b/platformio/builder/tools/piointegration.py similarity index 84% rename from platformio/builder/tools/pioide.py rename to platformio/builder/tools/piointegration.py index 5a6c2851..36989f06 100644 --- a/platformio/builder/tools/pioide.py +++ b/platformio/builder/tools/piointegration.py @@ -20,32 +20,31 @@ import os import SCons.Defaults # pylint: disable=import-error import SCons.Subst # pylint: disable=import-error -from platformio.package.manager.core import get_core_package_dir from platformio.proc import exec_command, where_is_program -def _dump_includes(env): - includes = {} +def DumpIntegrationIncludes(env): + result = dict(build=[], compatlib=[], toolchain=[]) - includes["build"] = [ - env.subst("$PROJECT_INCLUDE_DIR"), - env.subst("$PROJECT_SRC_DIR"), - ] - includes["build"].extend( + result["build"].extend( + [ + env.subst("$PROJECT_INCLUDE_DIR"), + env.subst("$PROJECT_SRC_DIR"), + ] + ) + result["build"].extend( [os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])] ) # installed libs - includes["compatlib"] = [] for lb in env.GetLibBuilders(): - includes["compatlib"].extend( + result["compatlib"].extend( [os.path.abspath(inc) for inc in lb.get_include_dirs()] ) # includes from toolchains p = env.PioPlatform() - includes["toolchain"] = [] - for pkg in p.get_installed_packages(): + for pkg in p.get_installed_packages(with_optional=False): if p.get_package_type(pkg.metadata.name) != "toolchain": 
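+            # Only packages of type "toolchain" contribute to the "toolchain"
+            # include list of the integration data; other platform packages
+            # (frameworks, uploader tools, etc.) are skipped.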
continue toolchain_dir = glob.escape(pkg.path) @@ -56,22 +55,9 @@ def _dump_includes(env): os.path.join(toolchain_dir, "*", "include*"), ] for g in toolchain_incglobs: - includes["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)]) + result["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)]) - # include Unity framework if there are tests in project - includes["unity"] = [] - auto_install_unity = False - test_dir = env.GetProjectConfig().get("platformio", "test_dir") - if os.path.isdir(test_dir) and os.listdir(test_dir) != ["README"]: - auto_install_unity = True - unity_dir = get_core_package_dir( - "tool-unity", - auto_install=auto_install_unity, - ) - if unity_dir: - includes["unity"].append(unity_dir) - - return includes + return result def _get_gcc_defines(env): @@ -154,14 +140,14 @@ def _subst_cmd(env, cmd): return " ".join([SCons.Subst.quote_spaces(arg) for arg in args]) -def DumpIDEData(env, globalenv): +def DumpIntegrationData(env, globalenv): """env here is `projenv`""" data = { "env_name": env["PIOENV"], "libsource_dirs": [env.subst(item) for item in env.GetLibSourceDirs()], "defines": _dump_defines(env), - "includes": _dump_includes(env), + "includes": env.DumpIntegrationIncludes(), "cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")), "cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")), "gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")), @@ -205,5 +191,6 @@ def exists(_): def generate(env): - env.AddMethod(DumpIDEData) + env.AddMethod(DumpIntegrationIncludes) + env.AddMethod(DumpIntegrationData) return env diff --git a/platformio/builder/tools/piolib.py b/platformio/builder/tools/piolib.py index 3b57c1e7..2372c3de 100644 --- a/platformio/builder/tools/piolib.py +++ b/platformio/builder/tools/piolib.py @@ -27,14 +27,16 @@ import sys import click import SCons.Scanner # pylint: disable=import-error from SCons.Script import ARGUMENTS # pylint: disable=import-error -from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error from SCons.Script import DefaultEnvironment # pylint: disable=import-error from platformio import exception, fs, util from platformio.builder.tools import platformio as piotool from platformio.clients.http import HTTPClientError, InternetIsOffline from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types -from platformio.package.exception import UnknownPackageError +from platformio.package.exception import ( + MissingPackageManifestError, + UnknownPackageError, +) from platformio.package.manager.library import LibraryPackageManager from platformio.package.manifest.parser import ( ManifestParserError, @@ -54,9 +56,9 @@ class LibBuilderFactory(object): used_frameworks = LibBuilderFactory.get_used_frameworks(env, path) common_frameworks = set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks) if common_frameworks: - clsname = "%sLibBuilder" % list(common_frameworks)[0].title() + clsname = "%sLibBuilder" % list(common_frameworks)[0].capitalize() elif used_frameworks: - clsname = "%sLibBuilder" % used_frameworks[0].title() + clsname = "%sLibBuilder" % used_frameworks[0].capitalize() obj = getattr(sys.modules[__name__], clsname)(env, path, verbose=verbose) @@ -136,9 +138,11 @@ class LibBuilderBase(object): ) self._manifest = {} - self._is_dependent = False - self._is_built = False - self._depbuilders = [] + self.is_dependent = False + self.is_built = False + self.depbuilders = [] + + self._deps_are_processed = False 
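+        # The dependency-resolution state above is now public API (formerly
+        # the private _is_dependent/_is_built/_depbuilders attributes):
+        #   is_dependent - this library was pulled into the dependency graph
+        #   is_built     - build() already processed this builder
+        #   depbuilders  - direct dependency LibBuilder instances
+        # _deps_are_processed makes process_dependencies() run only once
+        # per builder.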
self._circular_deps = [] self._processed_files = [] @@ -159,7 +163,12 @@ class LibBuilderBase(object): p2 = p2.lower() if p1 == p2: return True - return os.path.commonprefix((p1 + os.path.sep, p2)) == p1 + os.path.sep + if os.path.commonprefix([p1 + os.path.sep, p2]) == p1 + os.path.sep: + return True + # try to resolve paths + p1 = os.path.os.path.realpath(p1) + p2 = os.path.os.path.realpath(p2) + return os.path.commonprefix([p1 + os.path.sep, p2]) == p1 + os.path.sep @property def name(self): @@ -169,6 +178,11 @@ class LibBuilderBase(object): def version(self): return self._manifest.get("version") + @property + def dependent(self): + """Backward compatibility with ESP-IDF""" + return self.is_dependent + @property def dependencies(self): return self._manifest.get("dependencies") @@ -225,18 +239,6 @@ class LibBuilderBase(object): def extra_script(self): return None - @property - def depbuilders(self): - return self._depbuilders - - @property - def dependent(self): - return self._is_dependent - - @property - def is_built(self): - return self._is_built - @property def lib_archive(self): return self.env.GetProjectOption("lib_archive") @@ -296,8 +298,9 @@ class LibBuilderBase(object): self.env.ProcessUnFlags(self.build_unflags) def process_dependencies(self): - if not self.dependencies: + if not self.dependencies or self._deps_are_processed: return + self._deps_are_processed = True for item in self.dependencies: found = False for lb in self.env.GetLibBuilders(): @@ -305,7 +308,7 @@ class LibBuilderBase(object): continue found = True if lb not in self.depbuilders: - self.depend_recursive(lb) + self.depend_on(lb) break if not found and self.verbose: @@ -400,7 +403,29 @@ class LibBuilderBase(object): return result - def depend_recursive(self, lb, search_files=None): + def search_deps_recursive(self, search_files=None): + self.process_dependencies() + + # when LDF is disabled + if self.lib_ldf_mode == "off": + return + + if self.lib_ldf_mode.startswith("deep"): + search_files = self.get_search_files() + + lib_inc_map = {} + for inc in self._get_found_includes(search_files): + for lb in self.env.GetLibBuilders(): + if inc.get_abspath() in lb: + if lb not in lib_inc_map: + lib_inc_map[lb] = [] + lib_inc_map[lb].append(inc.get_abspath()) + break + + for lb, lb_search_files in lib_inc_map.items(): + self.depend_on(lb, search_files=lb_search_files) + + def depend_on(self, lb, search_files=None, recursive=True): def _already_depends(_lb): if self in _lb.depbuilders: return True @@ -418,38 +443,17 @@ class LibBuilderBase(object): "between `%s` and `%s`\n" % (self.path, lb.path) ) self._circular_deps.append(lb) - elif lb not in self._depbuilders: - self._depbuilders.append(lb) + elif lb not in self.depbuilders: + self.depbuilders.append(lb) + lb.is_dependent = True LibBuilderBase._INCLUDE_DIRS_CACHE = None - lb.search_deps_recursive(search_files) - def search_deps_recursive(self, search_files=None): - if not self._is_dependent: - self._is_dependent = True - self.process_dependencies() - - if self.lib_ldf_mode.startswith("deep"): - search_files = self.get_search_files() - - # when LDF is disabled - if self.lib_ldf_mode == "off": - return - - lib_inc_map = {} - for inc in self._get_found_includes(search_files): - for lb in self.env.GetLibBuilders(): - if inc.get_abspath() in lb: - if lb not in lib_inc_map: - lib_inc_map[lb] = [] - lib_inc_map[lb].append(inc.get_abspath()) - break - - for lb, lb_search_files in lib_inc_map.items(): - self.depend_recursive(lb, lb_search_files) + if recursive: + 
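+            # Recurse into the newly attached dependency so its own includes
+            # and manifest dependencies are resolved as well.
+            # ProjectAsLibBuilder.process_dependencies() passes
+            # recursive=False for project dependencies found in the package
+            # storage and triggers this second pass itself afterwards (see
+            # the found_lbs loop below).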
lb.search_deps_recursive(search_files) def build(self): libs = [] - for lb in self._depbuilders: + for lb in self.depbuilders: libs.extend(lb.build()) # copy shared information to self env for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"): @@ -458,9 +462,9 @@ class LibBuilderBase(object): for lb in self._circular_deps: self.env.PrependUnique(CPPPATH=lb.get_include_dirs()) - if self._is_built: + if self.is_built: return libs - self._is_built = True + self.is_built = True self.env.PrependUnique(CPPPATH=self.get_include_dirs()) @@ -632,7 +636,7 @@ class MbedLibBuilder(LibBuilderBase): def process_extra_options(self): self._process_mbed_lib_confs() - return super(MbedLibBuilder, self).process_extra_options() + return super().process_extra_options() def _process_mbed_lib_confs(self): mbed_lib_paths = [ @@ -851,7 +855,7 @@ class ProjectAsLibBuilder(LibBuilderBase): def __init__(self, env, *args, **kwargs): # backup original value, will be reset in base.__init__ project_src_filter = env.get("SRC_FILTER") - super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs) + super().__init__(env, *args, **kwargs) self.env["SRC_FILTER"] = project_src_filter @property @@ -877,7 +881,7 @@ class ProjectAsLibBuilder(LibBuilderBase): # project files items = LibBuilderBase.get_search_files(self) # test files - if "__test" in COMMAND_LINE_TARGETS: + if "test" in self.env.GetBuildType(): items.extend( [ os.path.join("$PROJECT_TEST_DIR", item) @@ -901,13 +905,19 @@ class ProjectAsLibBuilder(LibBuilderBase): # pylint: disable=no-member return self.env.get("SRC_FILTER") or LibBuilderBase.src_filter.fget(self) + @property + def build_flags(self): + # pylint: disable=no-member + return self.env.get("SRC_BUILD_FLAGS") or LibBuilderBase.build_flags.fget(self) + @property def dependencies(self): return self.env.GetProjectOption("lib_deps", []) def process_extra_options(self): - # skip for project, options are already processed - pass + with fs.cd(self.path): + self.env.ProcessFlags(self.build_flags) + self.env.ProcessUnFlags(self.build_unflags) def install_dependencies(self): def _is_builtin(spec): @@ -947,6 +957,7 @@ class ProjectAsLibBuilder(LibBuilderBase): DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=None) def process_dependencies(self): # pylint: disable=too-many-branches + found_lbs = [] for spec in self.dependencies: found = False for storage_dir in self.env.GetLibSourceDirs(): @@ -960,7 +971,8 @@ class ProjectAsLibBuilder(LibBuilderBase): if pkg.path != lb.path: continue if lb not in self.depbuilders: - self.depend_recursive(lb) + self.depend_on(lb, recursive=False) + found_lbs.append(lb) found = True break if found: @@ -972,12 +984,16 @@ class ProjectAsLibBuilder(LibBuilderBase): if lb.name != spec: continue if lb not in self.depbuilders: - self.depend_recursive(lb) + self.depend_on(lb) found = True break + # process library dependencies + for lb in found_lbs: + lb.search_deps_recursive() + def build(self): - self._is_built = True # do not build Project now + self.is_built = True # do not build Project now result = LibBuilderBase.build(self) self.env.PrependUnique(CPPPATH=self.get_include_dirs()) return result @@ -1015,7 +1031,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches if DefaultEnvironment().get("__PIO_LIB_BUILDERS", None) is not None: return sorted( DefaultEnvironment()["__PIO_LIB_BUILDERS"], - key=lambda lb: 0 if lb.dependent else 1, + key=lambda lb: 0 if lb.is_dependent else 1, ) DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=[]) @@ -1029,7 +1045,11 @@ def 
GetLibBuilders(env): # pylint: disable=too-many-branches continue for item in sorted(os.listdir(storage_dir)): lib_dir = os.path.join(storage_dir, item) - if item == "__cores__" or not os.path.isdir(lib_dir): + if item == "__cores__": + continue + if LibraryPackageManager.is_symlink(lib_dir): + lib_dir, _ = LibraryPackageManager.resolve_symlink(lib_dir) + if not lib_dir or not os.path.isdir(lib_dir): continue try: lb = LibBuilderFactory.new(env, lib_dir) @@ -1061,9 +1081,21 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches def ConfigureProjectLibBuilder(env): + _pm_storage = {} + + def _get_lib_license(pkg): + storage_dir = os.path.dirname(os.path.dirname(pkg.path)) + if storage_dir not in _pm_storage: + _pm_storage[storage_dir] = LibraryPackageManager(storage_dir) + try: + return (_pm_storage[storage_dir].load_manifest(pkg) or {}).get("license") + except MissingPackageManifestError: + pass + return None + def _correct_found_libs(lib_builders): # build full dependency graph - found_lbs = [lb for lb in lib_builders if lb.dependent] + found_lbs = [lb for lb in lib_builders if lb.is_dependent] for lb in lib_builders: if lb in found_lbs: lb.search_deps_recursive(lb.get_search_files()) @@ -1075,18 +1107,20 @@ def ConfigureProjectLibBuilder(env): def _print_deps_tree(root, level=0): margin = "| " * (level) for lb in root.depbuilders: - title = "<%s>" % lb.name + title = lb.name pkg = PackageItem(lb.path) if pkg.metadata: - title += " %s" % pkg.metadata.version + title += " @ %s" % pkg.metadata.version elif lb.version: - title += " %s" % lb.version + title += " @ %s" % lb.version click.echo("%s|-- %s" % (margin, title), nl=False) if int(ARGUMENTS.get("PIOVERBOSE", 0)): + click.echo( + " (License: %s, " % (_get_lib_license(pkg) or "Unknown"), nl=False + ) if pkg.metadata and pkg.metadata.spec.external: - click.echo(" [%s]" % pkg.metadata.spec.url, nl=False) - click.echo(" (", nl=False) - click.echo(lb.path, nl=False) + click.echo("URI: %s, " % pkg.metadata.spec.uri, nl=False) + click.echo("Path: %s" % lb.path, nl=False) click.echo(")", nl=False) click.echo("") if lb.depbuilders: diff --git a/platformio/builder/tools/piomisc.py b/platformio/builder/tools/piomisc.py index af703185..75fec40a 100644 --- a/platformio/builder/tools/piomisc.py +++ b/platformio/builder/tools/piomisc.py @@ -14,244 +14,15 @@ from __future__ import absolute_import -import atexit -import glob -import io import os -import re import sys -import tempfile - -import click from platformio import fs, util -from platformio.compat import get_filesystem_encoding, get_locale_encoding -from platformio.package.manager.core import get_core_package_dir from platformio.proc import exec_command -class InoToCPPConverter(object): - - PROTOTYPE_RE = re.compile( - r"""^( - (?:template\<.*\>\s*)? 
# template - ([a-z_\d\&]+\*?\s+){1,2} # return type - ([a-z_\d]+\s*) # name of prototype - \([a-z_,\.\*\&\[\]\s\d]*\) # arguments - )\s*(\{|;) # must end with `{` or `;` - """, - re.X | re.M | re.I, - ) - DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I) - PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)" - - def __init__(self, env): - self.env = env - self._main_ino = None - self._safe_encoding = None - - def read_safe_contents(self, path): - error_reported = False - for encoding in ( - "utf-8", - None, - get_filesystem_encoding(), - get_locale_encoding(), - "latin-1", - ): - try: - with io.open(path, encoding=encoding) as fp: - contents = fp.read() - self._safe_encoding = encoding - return contents - except UnicodeDecodeError: - if not error_reported: - error_reported = True - click.secho( - "Unicode decode error has occurred, please remove invalid " - "(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8" - % path, - fg="yellow", - err=True, - ) - return "" - - def write_safe_contents(self, path, contents): - with io.open( - path, "w", encoding=self._safe_encoding, errors="backslashreplace" - ) as fp: - return fp.write(contents) - - def is_main_node(self, contents): - return self.DETECTMAIN_RE.search(contents) - - def convert(self, nodes): - contents = self.merge(nodes) - if not contents: - return None - return self.process(contents) - - def merge(self, nodes): - assert nodes - lines = [] - for node in nodes: - contents = self.read_safe_contents(node.get_path()) - _lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents] - if self.is_main_node(contents): - lines = _lines + lines - self._main_ino = node.get_path() - else: - lines.extend(_lines) - - if not self._main_ino: - self._main_ino = nodes[0].get_path() - - return "\n".join(["#include "] + lines) if lines else None - - def process(self, contents): - out_file = self._main_ino + ".cpp" - assert self._gcc_preprocess(contents, out_file) - contents = self.read_safe_contents(out_file) - contents = self._join_multiline_strings(contents) - self.write_safe_contents(out_file, self.append_prototypes(contents)) - return out_file - - def _gcc_preprocess(self, contents, out_file): - tmp_path = tempfile.mkstemp()[1] - self.write_safe_contents(tmp_path, contents) - self.env.Execute( - self.env.VerboseAction( - '$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format( - out_file, tmp_path - ), - "Converting " + os.path.basename(out_file[:-4]), - ) - ) - atexit.register(_delete_file, tmp_path) - return os.path.isfile(out_file) - - def _join_multiline_strings(self, contents): - if "\\\n" not in contents: - return contents - newlines = [] - linenum = 0 - stropen = False - for line in contents.split("\n"): - _linenum = self._parse_preproc_line_num(line) - if _linenum is not None: - linenum = _linenum - else: - linenum += 1 - - if line.endswith("\\"): - if line.startswith('"'): - stropen = True - newlines.append(line[:-1]) - continue - if stropen: - newlines[len(newlines) - 1] += line[:-1] - continue - elif stropen and line.endswith(('",', '";')): - newlines[len(newlines) - 1] += line - stropen = False - newlines.append( - '#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/")) - ) - continue - - newlines.append(line) - - return "\n".join(newlines) - - @staticmethod - def _parse_preproc_line_num(line): - if not line.startswith("#"): - return None - tokens = line.split(" ", 3) - if len(tokens) > 2 and tokens[1].isdigit(): - return int(tokens[1]) - return None - - def _parse_prototypes(self, contents): - 
prototypes = [] - reserved_keywords = set(["if", "else", "while"]) - for match in self.PROTOTYPE_RE.finditer(contents): - if ( - set([match.group(2).strip(), match.group(3).strip()]) - & reserved_keywords - ): - continue - prototypes.append(match) - return prototypes - - def _get_total_lines(self, contents): - total = 0 - if contents.endswith("\n"): - contents = contents[:-1] - for line in contents.split("\n")[::-1]: - linenum = self._parse_preproc_line_num(line) - if linenum is not None: - return total + linenum - total += 1 - return total - - def append_prototypes(self, contents): - prototypes = self._parse_prototypes(contents) or [] - - # skip already declared prototypes - declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";") - prototypes = [m for m in prototypes if m.group(1).strip() not in declared] - - if not prototypes: - return contents - - prototype_names = set(m.group(3).strip() for m in prototypes) - split_pos = prototypes[0].start() - match_ptrs = re.search( - self.PROTOPTRS_TPLRE % ("|".join(prototype_names)), - contents[:split_pos], - re.M, - ) - if match_ptrs: - split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1 - - result = [] - result.append(contents[:split_pos].strip()) - result.append("%s;" % ";\n".join([m.group(1) for m in prototypes])) - result.append( - '#line %d "%s"' - % ( - self._get_total_lines(contents[:split_pos]), - self._main_ino.replace("\\", "/"), - ) - ) - result.append(contents[split_pos:].strip()) - return "\n".join(result) - - -def ConvertInoToCpp(env): - src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR")) - ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob( - os.path.join(src_dir, "*.pde") - ) - if not ino_nodes: - return - c = InoToCPPConverter(env) - out_file = c.convert(ino_nodes) - - atexit.register(_delete_file, out_file) - - -def _delete_file(path): - try: - if os.path.isfile(path): - os.remove(path) - except: # pylint: disable=bare-except - pass - - @util.memoized() -def _get_compiler_type(env): +def GetCompilerType(env): if env.subst("$CC").endswith("-gcc"): return "gcc" try: @@ -270,10 +41,6 @@ def _get_compiler_type(env): return None -def GetCompilerType(env): - return _get_compiler_type(env) - - def GetActualLDScript(env): def _lookup_in_ldpath(script): for d in env.get("LIBPATH", []): @@ -319,7 +86,7 @@ def GetActualLDScript(env): env.Exit(1) -def ConfigureDebugFlags(env): +def ConfigureDebugTarget(env): def _cleanup_debug_flags(scope): if scope not in env: return @@ -350,22 +117,6 @@ def ConfigureDebugFlags(env): env.AppendUnique(ASFLAGS=optimization_flags, LINKFLAGS=optimization_flags) -def ConfigureTestTarget(env): - env.Append( - CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"], - CPPPATH=[os.path.join("$BUILD_DIR", "UnityTestLib")], - ) - unitylib = env.BuildLibrary( - os.path.join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity") - ) - env.Prepend(LIBS=[unitylib]) - - src_filter = ["+<*.cpp>", "+<*.c>"] - if "PIOTEST_RUNNING_NAME" in env: - src_filter.append("+<%s%s>" % (env["PIOTEST_RUNNING_NAME"], os.path.sep)) - env.Replace(PIOTEST_SRC_FILTER=src_filter) - - def GetExtraScripts(env, scope): items = [] for item in env.GetProjectOption("extra_scripts", []): @@ -376,18 +127,17 @@ def GetExtraScripts(env, scope): if not items: return items with fs.cd(env.subst("$PROJECT_DIR")): - return [os.path.abspath(item) for item in items] + return [os.path.abspath(env.subst(item)) for item in items] + + +def generate(env): + env.AddMethod(GetCompilerType) + 
env.AddMethod(GetActualLDScript) + env.AddMethod(ConfigureDebugTarget) + env.AddMethod(GetExtraScripts) + # backward-compatibility with Zephyr build script + env.AddMethod(ConfigureDebugTarget, "ConfigureDebugFlags") def exists(_): return True - - -def generate(env): - env.AddMethod(ConvertInoToCpp) - env.AddMethod(GetCompilerType) - env.AddMethod(GetActualLDScript) - env.AddMethod(ConfigureDebugFlags) - env.AddMethod(ConfigureTestTarget) - env.AddMethod(GetExtraScripts) - return env diff --git a/platformio/builder/tools/pioplatform.py b/platformio/builder/tools/pioplatform.py index 5f7182c4..c8c2785f 100644 --- a/platformio/builder/tools/pioplatform.py +++ b/platformio/builder/tools/pioplatform.py @@ -19,6 +19,7 @@ import sys from SCons.Script import ARGUMENTS # pylint: disable=import-error from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error +from SCons.Script import DefaultEnvironment # pylint: disable=import-error from platformio import fs, util from platformio.compat import IS_MACOS, IS_WINDOWS @@ -32,16 +33,17 @@ from platformio.project.config import ProjectOptions @util.memoized() -def PioPlatform(env): - variables = env.GetProjectOptions(as_dict=True) - if "framework" in variables: - # support PIO Core 3.0 dev/platforms - variables["pioframework"] = variables["framework"] +def _PioPlatform(): + env = DefaultEnvironment() p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"])) - p.configure_default_packages(variables, COMMAND_LINE_TARGETS) + p.configure_project_packages(env["PIOENV"], COMMAND_LINE_TARGETS) return p +def PioPlatform(_): + return _PioPlatform() + + def BoardConfig(env, board=None): with fs.cd(env.subst("$PROJECT_DIR")): try: @@ -160,7 +162,7 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements and pkg_metadata and pkg_metadata.spec.external ): - data.append("(%s)" % pkg_metadata.spec.url) + data.append("(%s)" % pkg_metadata.spec.uri) if board_config: data.extend([">", board_config.get("name")]) return data @@ -213,7 +215,7 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements data = [] for item in platform.dump_used_packages(): original_version = get_original_version(item["version"]) - info = "%s %s" % (item["name"], item["version"]) + info = "%s @ %s" % (item["name"], item["version"]) extra = [] if original_version: extra.append(original_version) diff --git a/platformio/builder/tools/pioproject.py b/platformio/builder/tools/pioproject.py index 4bf848d9..425d6758 100644 --- a/platformio/builder/tools/pioproject.py +++ b/platformio/builder/tools/pioproject.py @@ -14,7 +14,8 @@ from __future__ import absolute_import -from platformio.project.config import MISSING, ProjectConfig, ProjectOptions +from platformio.compat import MISSING +from platformio.project.config import ProjectConfig, ProjectOptions def GetProjectConfig(env): diff --git a/platformio/builder/tools/piotest.py b/platformio/builder/tools/piotest.py new file mode 100644 index 00000000..08d475d2 --- /dev/null +++ b/platformio/builder/tools/piotest.py @@ -0,0 +1,63 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import os + +from platformio.builder.tools import platformio as piotool +from platformio.test.result import TestSuite +from platformio.test.runners.factory import TestRunnerFactory + + +def ConfigureTestTarget(env): + env.Append( + CPPDEFINES=["UNIT_TEST", "PIO_UNIT_TESTING"], + PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piotool.SRC_BUILD_EXT], + ) + env.Prepend(CPPPATH=["$PROJECT_TEST_DIR"]) + + if "PIOTEST_RUNNING_NAME" in env: + test_name = env["PIOTEST_RUNNING_NAME"] + while True: + test_name = os.path.dirname(test_name) # parent dir + # skip nested tests (user's side issue?) + if not test_name or os.path.basename(test_name).startswith("test_"): + break + env.Prepend( + PIOTEST_SRC_FILTER=[ + f"+<{test_name}{os.path.sep}*.{ext}>" + for ext in piotool.SRC_BUILD_EXT + ], + CPPPATH=[os.path.join("$PROJECT_TEST_DIR", test_name)], + ) + + env.Prepend( + PIOTEST_SRC_FILTER=[f"+<$PIOTEST_RUNNING_NAME{os.path.sep}>"], + CPPPATH=[os.path.join("$PROJECT_TEST_DIR", "$PIOTEST_RUNNING_NAME")], + ) + + test_runner = TestRunnerFactory.new( + TestSuite(env["PIOENV"], env.get("PIOTEST_RUNNING_NAME", "*")), + env.GetProjectConfig(), + ) + test_runner.configure_build_env(env) + + +def generate(env): + env.AddMethod(ConfigureTestTarget) + + +def exists(_): + return True diff --git a/platformio/builder/tools/pioupload.py b/platformio/builder/tools/pioupload.py index 23f252e5..14ee59e1 100644 --- a/platformio/builder/tools/pioupload.py +++ b/platformio/builder/tools/pioupload.py @@ -12,25 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+# pylint: disable=unused-argument + from __future__ import absolute_import +import os import re import sys -from fnmatch import fnmatch -from os import environ -from os.path import isfile, join from shutil import copyfile from time import sleep from SCons.Script import ARGUMENTS # pylint: disable=import-error from serial import Serial, SerialException -from platformio import exception, fs, util -from platformio.compat import IS_WINDOWS +from platformio import exception, fs +from platformio.device.finder import find_mbed_disk, find_serial_port, is_pattern_port +from platformio.device.list import list_serial_ports from platformio.proc import exec_command -# pylint: disable=unused-argument - def FlushSerialBuffer(env, port): s = Serial(env.subst(port)) @@ -62,7 +61,7 @@ def WaitForNewSerialPort(env, before): elapsed = 0 before = [p["port"] for p in before] while elapsed < 5 and new_port is None: - now = [p["port"] for p in util.get_serial_ports()] + now = [p["port"] for p in list_serial_ports()] for p in now: if p not in before: new_port = p @@ -97,67 +96,28 @@ def WaitForNewSerialPort(env, before): def AutodetectUploadPort(*args, **kwargs): env = args[0] - - def _get_pattern(): - if "UPLOAD_PORT" not in env: - return None - if set(["*", "?", "[", "]"]) & set(env["UPLOAD_PORT"]): - return env["UPLOAD_PORT"] - return None - - def _is_match_pattern(port): - pattern = _get_pattern() - if not pattern: - return True - return fnmatch(port, pattern) - - def _look_for_mbed_disk(): - msdlabels = ("mbed", "nucleo", "frdm", "microbit") - for item in util.get_logical_devices(): - if item["path"].startswith("/net") or not _is_match_pattern(item["path"]): - continue - mbed_pages = [join(item["path"], n) for n in ("mbed.htm", "mbed.html")] - if any(isfile(p) for p in mbed_pages): - return item["path"] - if item["name"] and any(l in item["name"].lower() for l in msdlabels): - return item["path"] - return None - - def _look_for_serial_port(): - port = None - board_hwids = [] - upload_protocol = env.subst("$UPLOAD_PROTOCOL") - if "BOARD" in env and "build.hwids" in env.BoardConfig(): - board_hwids = env.BoardConfig().get("build.hwids") - for item in util.get_serial_ports(filter_hwid=True): - if not _is_match_pattern(item["port"]): - continue - port = item["port"] - if upload_protocol.startswith("blackmagic"): - if IS_WINDOWS and port.startswith("COM") and len(port) > 4: - port = "\\\\.\\%s" % port - if "GDB" in item["description"]: - return port - for hwid in board_hwids: - hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "") - if hwid_str in item["hwid"]: - return port - return port - - if "UPLOAD_PORT" in env and not _get_pattern(): - print(env.subst("Use manually specified: $UPLOAD_PORT")) + initial_port = env.subst("$UPLOAD_PORT") + upload_protocol = env.subst("$UPLOAD_PROTOCOL") + if initial_port and not is_pattern_port(initial_port): + print(env.subst("Using manually specified: $UPLOAD_PORT")) return - if env.subst("$UPLOAD_PROTOCOL") == "mbed" or ( - "mbed" in env.subst("$PIOFRAMEWORK") and not env.subst("$UPLOAD_PROTOCOL") + if upload_protocol == "mbed" or ( + "mbed" in env.subst("$PIOFRAMEWORK") and not upload_protocol ): - env.Replace(UPLOAD_PORT=_look_for_mbed_disk()) + env.Replace(UPLOAD_PORT=find_mbed_disk(initial_port)) else: try: fs.ensure_udev_rules() except exception.InvalidUdevRules as e: sys.stderr.write("\n%s\n\n" % e) - env.Replace(UPLOAD_PORT=_look_for_serial_port()) + env.Replace( + UPLOAD_PORT=find_serial_port( + initial_port=initial_port, + board_config=env.BoardConfig() if 
"BOARD" in env else None, + upload_protocol=upload_protocol, + ) + ) if env.subst("$UPLOAD_PORT"): print(env.subst("Auto-detected: $UPLOAD_PORT")) @@ -175,10 +135,12 @@ def UploadToDisk(_, target, source, env): assert "UPLOAD_PORT" in env progname = env.subst("$PROGNAME") for ext in ("bin", "hex"): - fpath = join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext)) - if not isfile(fpath): + fpath = os.path.join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext)) + if not os.path.isfile(fpath): continue - copyfile(fpath, join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext))) + copyfile( + fpath, os.path.join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)) + ) print( "Firmware has been successfully uploaded.\n" "(Some boards may require manual hard reset)" @@ -211,7 +173,7 @@ def CheckUploadSize(_, target, source, env): if not isinstance(cmd, list): cmd = cmd.split() cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg] - sysenv = environ.copy() + sysenv = os.environ.copy() sysenv["PATH"] = str(env["ENV"]["PATH"]) result = exec_command(env.subst(cmd), env=sysenv) if result["returncode"] != 0: diff --git a/platformio/builder/tools/platformio.py b/platformio/builder/tools/platformio.py index 61274dc7..fde67426 100644 --- a/platformio/builder/tools/platformio.py +++ b/platformio/builder/tools/platformio.py @@ -47,14 +47,16 @@ def scons_patched_match_splitext(path, suffixes=None): def GetBuildType(env): - return ( - "debug" - if ( - set(["__debug", "sizedata"]) & set(COMMAND_LINE_TARGETS) - or env.GetProjectOption("build_type") == "debug" - ) - else "release" - ) + modes = [] + if ( + set(["__debug", "sizedata"]) # sizedata = for memory inspection + & set(COMMAND_LINE_TARGETS) + or env.GetProjectOption("build_type") == "debug" + ): + modes.append("debug") + if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test": + modes.append("test") + return "+".join(modes or ["release"]) def BuildProgram(env): @@ -113,10 +115,6 @@ def ProcessProgramDeps(env): env.PrintConfiguration() - # fix ASM handling under non case-sensitive OS - if not Util.case_sensitive_suffixes(".s", ".S"): - env.Replace(AS="$CC", ASCOM="$ASPPCOM") - # process extra flags from board if "BOARD" in env and "build.extra_flags" in env.BoardConfig(): env.ProcessFlags(env.BoardConfig().get("build.extra_flags")) @@ -127,14 +125,20 @@ def ProcessProgramDeps(env): # process framework scripts env.BuildFrameworks(env.get("PIOFRAMEWORK")) - if env.GetBuildType() == "debug": - env.ConfigureDebugFlags() + if "debug" in env.GetBuildType(): + env.ConfigureDebugTarget() + if "test" in env.GetBuildType(): + env.ConfigureTestTarget() # remove specified flags env.ProcessUnFlags(env.get("BUILD_UNFLAGS")) - if "__test" in COMMAND_LINE_TARGETS: - env.ConfigureTestTarget() + if "compiledb" in COMMAND_LINE_TARGETS and env.get( + "COMPILATIONDB_INCLUDE_TOOLCHAIN" + ): + for scope, includes in env.DumpIntegrationIncludes().items(): + if scope in ("toolchain",): + env.Append(CPPPATH=includes) def ProcessProjectDeps(env): @@ -158,12 +162,11 @@ def ProcessProjectDeps(env): # extra build flags from `platformio.ini` projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS")) - is_test = "__test" in COMMAND_LINE_TARGETS - if is_test: + if "test" in env.GetBuildType(): projenv.BuildSources( "$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER" ) - if not is_test or env.GetProjectOption("test_build_project_src"): + if "test" not in env.GetBuildType() or env.GetProjectOption("test_build_src"): projenv.BuildSources( 
"$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER") ) diff --git a/platformio/clients/account.py b/platformio/clients/account.py index 60349934..2afe6fbe 100644 --- a/platformio/clients/account.py +++ b/platformio/clients/account.py @@ -40,7 +40,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7 def __init__(self): - super(AccountClient, self).__init__(__accounts_api__) + super().__init__(__accounts_api__) @staticmethod def get_refresh_token(): @@ -63,7 +63,7 @@ class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods def fetch_json_data(self, *args, **kwargs): try: - return super(AccountClient, self).fetch_json_data(*args, **kwargs) + return super().fetch_json_data(*args, **kwargs) except HTTPClientError as exc: raise AccountError(exc) from exc diff --git a/platformio/clients/http.py b/platformio/clients/http.py index 3cf247b4..86fb8cae 100644 --- a/platformio/clients/http.py +++ b/platformio/clients/http.py @@ -16,6 +16,7 @@ import json import math import os import socket +from urllib.parse import urljoin import requests.adapters from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error @@ -24,15 +25,10 @@ from platformio import __check_internet_hosts__, __default_requests_timeout__, a from platformio.cache import ContentCache, cleanup_content_cache from platformio.exception import PlatformioException, UserSideException -try: - from urllib.parse import urljoin -except ImportError: - from urlparse import urljoin - class HTTPClientError(PlatformioException): def __init__(self, message, response=None): - super(HTTPClientError, self).__init__() + super().__init__() self.message = message self.response = response @@ -51,16 +47,14 @@ class InternetIsOffline(UserSideException): class EndpointSession(requests.Session): def __init__(self, base_url, *args, **kwargs): - super(EndpointSession, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.base_url = base_url def request( # pylint: disable=signature-differs,arguments-differ self, method, url, *args, **kwargs ): # print(self.base_url, method, url, args, kwargs) - return super(EndpointSession, self).request( - method, urljoin(self.base_url, url), *args, **kwargs - ) + return super().request(method, urljoin(self.base_url, url), *args, **kwargs) class EndpointSessionIterator(object): @@ -79,10 +73,6 @@ class EndpointSessionIterator(object): def __iter__(self): # pylint: disable=non-iterator-returned return self - def next(self): - """For Python 2 compatibility""" - return self.__next__() - def __next__(self): base_url = next(self.endpoints_iter) session = EndpointSession(base_url) @@ -149,7 +139,7 @@ class HTTPClient(object): raise HTTPClientError(str(e)) def fetch_json_data(self, method, path, **kwargs): - if method != "get": + if method not in ("get", "head", "options"): cleanup_content_cache("http") cache_valid = kwargs.pop("x_cache_valid") if "x_cache_valid" in kwargs else None if not cache_valid: diff --git a/platformio/clients/registry.py b/platformio/clients/registry.py index aba734ff..75bfd99b 100644 --- a/platformio/clients/registry.py +++ b/platformio/clients/registry.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from platformio import __registry_api__, fs +from platformio import __registry_mirror_hosts__, fs from platformio.clients.account import AccountClient, AccountError from platformio.clients.http import HTTPClient, HTTPClientError @@ -21,7 +21,8 @@ from platformio.clients.http import HTTPClient, HTTPClientError class RegistryClient(HTTPClient): def __init__(self): - super(RegistryClient, self).__init__(__registry_api__) + endpoints = [f"https://api.{host}" for host in __registry_mirror_hosts__] + super().__init__(endpoints) @staticmethod def allowed_private_packages(): @@ -102,14 +103,14 @@ class RegistryClient(HTTPClient): "get", "/v3/resources", params={"owner": owner} if owner else None, + x_cache_valid="1h", x_with_authorization=True, ) - def list_packages(self, query=None, filters=None, page=None): - assert query or filters + def list_packages(self, query=None, qualifiers=None, page=None, sort=None): search_query = [] - if filters: - valid_filters = ( + if qualifiers: + valid_qualifiers = ( "authors", "keywords", "frameworks", @@ -120,8 +121,8 @@ class RegistryClient(HTTPClient): "owners", "types", ) - assert set(filters.keys()) <= set(valid_filters) - for name, values in filters.items(): + assert set(qualifiers.keys()) <= set(valid_qualifiers) + for name, values in qualifiers.items(): for value in set( values if isinstance(values, (list, tuple)) else [values] ): @@ -131,6 +132,8 @@ class RegistryClient(HTTPClient): params = dict(query=" ".join(search_query)) if page: params["page"] = int(page) + if sort: + params["sort"] = sort return self.fetch_json_data( "get", "/v3/search", diff --git a/platformio/commands/__init__.py b/platformio/commands/__init__.py index f6bac830..22cacc60 100644 --- a/platformio/commands/__init__.py +++ b/platformio/commands/__init__.py @@ -22,7 +22,7 @@ class PlatformioCLI(click.MultiCommand): leftover_args = [] def __init__(self, *args, **kwargs): - super(PlatformioCLI, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._pio_cmds_dir = os.path.dirname(__file__) @staticmethod @@ -41,7 +41,7 @@ class PlatformioCLI(click.MultiCommand): PlatformioCLI.leftover_args = ctx.args if hasattr(ctx, "protected_args"): PlatformioCLI.leftover_args = ctx.protected_args + ctx.args - return super(PlatformioCLI, self).invoke(ctx) + return super().invoke(ctx) def list_commands(self, ctx): cmds = [] @@ -74,7 +74,13 @@ class PlatformioCLI(click.MultiCommand): def _handle_obsolate_command(name): # pylint: disable=import-outside-toplevel if name == "init": - from platformio.commands.project import project_init + from platformio.project.commands.init import project_init_cmd + + return project_init_cmd + + if name == "package": + from platformio.commands.pkg import cli + + return cli - return project_init raise AttributeError() diff --git a/platformio/commands/account.py b/platformio/commands/account.py index 0282767e..48b26717 100644 --- a/platformio/commands/account.py +++ b/platformio/commands/account.py @@ -14,13 +14,13 @@ # pylint: disable=unused-argument -import datetime import json import re import click from tabulate import tabulate +from platformio import util from platformio.clients.account import AccountClient, AccountNotAuthorized @@ -244,12 +244,9 @@ def print_packages(packages): data = [] expire = "-" if "subscription" in package: - expire = datetime.datetime.strptime( - ( - package["subscription"].get("end_at") - or package["subscription"].get("next_bill_at") - ), - "%Y-%m-%dT%H:%M:%SZ", + expire = util.parse_datetime( + 
package["subscription"].get("end_at") + or package["subscription"].get("next_bill_at") ).strftime("%Y-%m-%d") data.append(("Expire:", expire)) services = [] @@ -274,21 +271,17 @@ def print_subscriptions(subscriptions): click.secho(subscription.get("product_name"), bold=True) click.echo("-" * len(subscription.get("product_name"))) data = [("State:", subscription.get("status"))] - begin_at = datetime.datetime.strptime( - subscription.get("begin_at"), "%Y-%m-%dT%H:%M:%SZ" - ).strftime("%Y-%m-%d %H:%M:%S") + begin_at = util.parse_datetime(subscription.get("begin_at")).strftime("%c") data.append(("Start date:", begin_at or "-")) end_at = subscription.get("end_at") if end_at: - end_at = datetime.datetime.strptime( - subscription.get("end_at"), "%Y-%m-%dT%H:%M:%SZ" - ).strftime("%Y-%m-%d %H:%M:%S") + end_at = util.parse_datetime(subscription.get("end_at")).strftime("%c") data.append(("End date:", end_at or "-")) next_bill_at = subscription.get("next_bill_at") if next_bill_at: - next_bill_at = datetime.datetime.strptime( - subscription.get("next_bill_at"), "%Y-%m-%dT%H:%M:%SZ" - ).strftime("%Y-%m-%d %H:%M:%S") + next_bill_at = util.parse_datetime( + subscription.get("next_bill_at") + ).strftime("%c") data.append(("Next payment:", next_bill_at or "-")) data.append( ("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-") diff --git a/platformio/commands/boards.py b/platformio/commands/boards.py index b51103ca..2fe047e5 100644 --- a/platformio/commands/boards.py +++ b/platformio/commands/boards.py @@ -22,7 +22,7 @@ from platformio import fs from platformio.package.manager.platform import PlatformPackageManager -@click.command("boards", short_help="Embedded board explorer") +@click.command("boards", short_help="Board Explorer") @click.argument("query", required=False) @click.option("--installed", is_flag=True) @click.option("--json-output", is_flag=True) diff --git a/platformio/commands/check/command.py b/platformio/commands/check/command.py index 82ab7566..e24836cc 100644 --- a/platformio/commands/check/command.py +++ b/platformio/commands/check/command.py @@ -32,7 +32,7 @@ from platformio.project.config import ProjectConfig from platformio.project.helpers import find_project_dir_above, get_project_dir -@click.command("check", short_help="Static code analysis") +@click.command("check", short_help="Static Code Analysis") @click.option("-e", "--environment", multiple=True) @click.option( "-d", @@ -118,6 +118,7 @@ def cli( if silent else severity or config.get("env:" + envname, "check_severity"), skip_packages=skip_packages or env_options.get("check_skip_packages"), + platform_packages=env_options.get("platform_packages"), ) for tool in config.get("env:" + envname, "check_tool"): @@ -166,7 +167,10 @@ def cli( if json_output: click.echo(json.dumps(results_to_json(results))) elif not silent: - print_check_summary(results) + print_check_summary(results, verbose=verbose) + + # Reset custom project config + app.set_session_var("custom_project_conf", None) command_failed = any(r.get("succeeded") is False for r in results) if command_failed: @@ -267,7 +271,7 @@ def print_defects_stats(results): click.echo() -def print_check_summary(results): +def print_check_summary(results, verbose=False): click.echo() tabular_data = [] @@ -284,6 +288,8 @@ def print_check_summary(results): status_str = click.style("FAILED", fg="red") elif result.get("succeeded") is None: status_str = "IGNORED" + if not verbose: + continue else: succeeded_nums += 1 status_str = click.style("PASSED", fg="green") diff --git 
a/platformio/commands/check/defect.py b/platformio/commands/check/defect.py index 5e907d3e..d271ab48 100644 --- a/platformio/commands/check/defect.py +++ b/platformio/commands/check/defect.py @@ -34,7 +34,7 @@ class DefectItem(object): severity, category, message, - file="unknown", + file=None, line=0, column=0, id=None, @@ -50,7 +50,7 @@ class DefectItem(object): self.callstack = callstack self.cwe = cwe self.id = id - self.file = file + self.file = file or "unknown" if file.lower().startswith(get_project_dir().lower()): self.file = os.path.relpath(file, get_project_dir()) diff --git a/platformio/commands/check/tools/base.py b/platformio/commands/check/tools/base.py index d5328d1a..07636e1f 100644 --- a/platformio/commands/check/tools/base.py +++ b/platformio/commands/check/tools/base.py @@ -20,7 +20,9 @@ import click from platformio import fs, proc from platformio.commands.check.defect import DefectItem -from platformio.project.helpers import load_project_ide_data +from platformio.package.manager.core import get_core_package_dir +from platformio.package.meta import PackageSpec +from platformio.project.helpers import load_build_metadata class CheckToolBase(object): # pylint: disable=too-many-instance-attributes @@ -55,7 +57,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes ] def _load_cpp_data(self, project_dir): - data = load_project_ide_data(project_dir, self.envname) + data = load_build_metadata(project_dir, self.envname) if not data: return self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", "")) @@ -66,6 +68,13 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes self.cxx_path = data.get("cxx_path") self.toolchain_defines = self._get_toolchain_defines() + def get_tool_dir(self, pkg_name): + for spec in self.options["platform_packages"] or []: + spec = PackageSpec(spec) + if spec.name == pkg_name: + return get_core_package_dir(pkg_name, spec=spec) + return get_core_package_dir(pkg_name) + def get_flags(self, tool): result = [] flags = self.options.get("flags") or [] diff --git a/platformio/commands/check/tools/clangtidy.py b/platformio/commands/check/tools/clangtidy.py index d6f99058..c357cf4d 100644 --- a/platformio/commands/check/tools/clangtidy.py +++ b/platformio/commands/check/tools/clangtidy.py @@ -17,11 +17,10 @@ from os.path import join from platformio.commands.check.defect import DefectItem from platformio.commands.check.tools.base import CheckToolBase -from platformio.package.manager.core import get_core_package_dir class ClangtidyCheckTool(CheckToolBase): - def tool_output_filter(self, line): + def tool_output_filter(self, line): # pylint: disable=arguments-differ if not self.options.get("verbose") and "[clang-diagnostic-error]" in line: return "" @@ -34,7 +33,7 @@ class ClangtidyCheckTool(CheckToolBase): return "" - def parse_defect(self, raw_line): + def parse_defect(self, raw_line): # pylint: disable=arguments-differ match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", raw_line) if not match: return raw_line @@ -56,11 +55,13 @@ class ClangtidyCheckTool(CheckToolBase): return cmd_result["returncode"] < 2 def configure_command(self): - tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy") + tool_path = join(self.get_tool_dir("tool-clangtidy"), "clang-tidy") cmd = [tool_path, "--quiet"] flags = self.get_flags("clangtidy") - if not self.is_flag_set("--checks", flags): + if not ( + self.is_flag_set("--checks", flags) or self.is_flag_set("--config", flags) + ): 
cmd.append("--checks=*") project_files = self.get_project_target_files(self.options["patterns"]) diff --git a/platformio/commands/check/tools/cppcheck.py b/platformio/commands/check/tools/cppcheck.py index 99512d52..ec2b96d3 100644 --- a/platformio/commands/check/tools/cppcheck.py +++ b/platformio/commands/check/tools/cppcheck.py @@ -19,11 +19,11 @@ import click from platformio import proc from platformio.commands.check.defect import DefectItem from platformio.commands.check.tools.base import CheckToolBase -from platformio.package.manager.core import get_core_package_dir class CppcheckCheckTool(CheckToolBase): def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) self._field_delimiter = "<&PIO&>" self._buffer = "" self.defect_fields = [ @@ -36,9 +36,8 @@ class CppcheckCheckTool(CheckToolBase): "cwe", "id", ] - super(CppcheckCheckTool, self).__init__(*args, **kwargs) - def tool_output_filter(self, line): + def tool_output_filter(self, line): # pylint: disable=arguments-differ if ( not self.options.get("verbose") and "--suppress=unmatchedSuppression:" in line @@ -50,13 +49,14 @@ class CppcheckCheckTool(CheckToolBase): for msg in ( "No C or C++ source files found", "unrecognized command line option", + "there was an internal error", ) ): self._bad_input = True return line - def parse_defect(self, raw_line): + def parse_defect(self, raw_line): # pylint: disable=arguments-differ if self._field_delimiter not in raw_line: return None @@ -103,7 +103,7 @@ class CppcheckCheckTool(CheckToolBase): return DefectItem(**args) def configure_command(self, language, src_file): # pylint: disable=arguments-differ - tool_path = os.path.join(get_core_package_dir("tool-cppcheck"), "cppcheck") + tool_path = os.path.join(self.get_tool_dir("tool-cppcheck"), "cppcheck") cmd = [ tool_path, @@ -208,7 +208,7 @@ class CppcheckCheckTool(CheckToolBase): return self._create_tmp_file("\n".join(result)) def clean_up(self): - super(CppcheckCheckTool, self).clean_up() + super().clean_up() # delete temporary dump files generated by addons if not self.is_flag_set("--addon", self.get_flags("cppcheck")): diff --git a/platformio/commands/check/tools/pvsstudio.py b/platformio/commands/check/tools/pvsstudio.py index a872c0af..0479d91e 100644 --- a/platformio/commands/check/tools/pvsstudio.py +++ b/platformio/commands/check/tools/pvsstudio.py @@ -23,22 +23,21 @@ from platformio import proc from platformio.commands.check.defect import DefectItem from platformio.commands.check.tools.base import CheckToolBase from platformio.compat import IS_WINDOWS -from platformio.package.manager.core import get_core_package_dir class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-attributes def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) self._tmp_dir = tempfile.mkdtemp(prefix="piocheck") self._tmp_preprocessed_file = self._generate_tmp_file_path() + ".i" self._tmp_output_file = self._generate_tmp_file_path() + ".pvs" self._tmp_cfg_file = self._generate_tmp_file_path() + ".cfg" self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd" self.tool_path = os.path.join( - get_core_package_dir("tool-pvs-studio"), + self.get_tool_dir("tool-pvs-studio"), "x64" if IS_WINDOWS else "bin", "pvs-studio", ) - super(PvsStudioCheckTool, self).__init__(*args, **kwargs) with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp: fp.write( @@ -53,8 +52,14 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at ) ) - def tool_output_filter(self, line): - if "license was 
not entered" in line.lower(): + def tool_output_filter(self, line): # pylint: disable=arguments-differ + if any( + err_msg in line.lower() + for err_msg in ( + "license was not entered", + "license information is incorrect", + ) + ): self._bad_input = True return line @@ -70,7 +75,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at def _demangle_report(self, output_file): converter_tool = os.path.join( - get_core_package_dir("tool-pvs-studio"), + self.get_tool_dir("tool-pvs-studio"), "HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"), ) @@ -194,7 +199,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at '"%s"' % self._tmp_preprocessed_file, ] cmd.extend([f for f in flags if f]) - cmd.extend(["-D%s" % d for d in self.cpp_defines]) + cmd.extend(['"-D%s"' % d.replace('"', '\\"') for d in self.cpp_defines]) cmd.append('@"%s"' % self._tmp_cmd_file) # Explicitly specify C++ as the language used in .ino files @@ -209,7 +214,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at self._bad_input = True def clean_up(self): - super(PvsStudioCheckTool, self).clean_up() + super().clean_up() if os.path.isdir(self._tmp_dir): shutil.rmtree(self._tmp_dir) diff --git a/platformio/commands/ci.py b/platformio/commands/ci.py index 050baa65..17880196 100644 --- a/platformio/commands/ci.py +++ b/platformio/commands/ci.py @@ -20,10 +20,9 @@ import tempfile import click from platformio import app, fs -from platformio.commands.project import project_init as cmd_project_init -from platformio.commands.project import validate_boards from platformio.commands.run.command import cli as cmd_run from platformio.exception import CIBuildEnvsEmpty +from platformio.project.commands.init import project_init_cmd, validate_boards from platformio.project.config import ProjectConfig @@ -44,7 +43,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument raise click.BadParameter("Found invalid path: %s" % invalid_path) -@click.command("ci", short_help="Continuous integration") +@click.command("ci", short_help="Continuous Integration") @click.argument("src", nargs=-1, callback=validate_path) @click.option("-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY") @click.option("--exclude", multiple=True) @@ -109,7 +108,7 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches # initialise project ctx.invoke( - cmd_project_init, + project_init_cmd, project_dir=build_dir, board=board, project_option=project_option, diff --git a/platformio/commands/debug.py b/platformio/commands/debug.py index 48d86891..3ab61d9b 100644 --- a/platformio/commands/debug.py +++ b/platformio/commands/debug.py @@ -12,169 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# pylint: disable=too-many-arguments, too-many-locals -# pylint: disable=too-many-branches, too-many-statements +# pylint: disable=unused-import -import asyncio -import os -import signal -import subprocess - -import click - -from platformio import app, exception, fs, proc -from platformio.commands.platform import init_platform -from platformio.compat import IS_WINDOWS -from platformio.debug import helpers -from platformio.debug.config.factory import DebugConfigFactory -from platformio.debug.exception import DebugInvalidOptionsError -from platformio.debug.process.gdb import GDBClientProcess -from platformio.project.config import ProjectConfig -from platformio.project.exception import ProjectEnvsNotAvailableError -from platformio.project.helpers import is_platformio_project -from platformio.project.options import ProjectOptions - - -@click.command( - "debug", - context_settings=dict(ignore_unknown_options=True), - short_help="Unified debugger", -) -@click.option( - "-d", - "--project-dir", - default=os.getcwd, - type=click.Path( - exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True - ), -) -@click.option( - "-c", - "--project-conf", - type=click.Path( - exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True - ), -) -@click.option("--environment", "-e", metavar="") -@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type) -@click.option("--verbose", "-v", is_flag=True) -@click.option("--interface", type=click.Choice(["gdb"])) -@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED) -@click.pass_context -def cli( - ctx, - project_dir, - project_conf, - environment, - load_mode, - verbose, - interface, - __unprocessed, -): - app.set_session_var("custom_project_conf", project_conf) - - # use env variables from Eclipse or CLion - for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"): - if is_platformio_project(project_dir): - break - if os.getenv(name): - project_dir = os.getenv(name) - - with fs.cd(project_dir): - project_config = ProjectConfig.get_instance(project_conf) - project_config.validate(envs=[environment] if environment else None) - env_name = environment or helpers.get_default_debug_env(project_config) - - if not interface: - return helpers.predebug_project( - ctx, project_dir, project_config, env_name, False, verbose - ) - - env_options = project_config.items(env=env_name, as_dict=True) - if "platform" not in env_options: - raise ProjectEnvsNotAvailableError() - - with fs.cd(project_dir): - debug_config = DebugConfigFactory.new( - init_platform(env_options["platform"]), project_config, env_name - ) - - if "--version" in __unprocessed: - return subprocess.run( - [debug_config.client_executable_path, "--version"], check=True - ) - - try: - fs.ensure_udev_rules() - except exception.InvalidUdevRules as e: - click.echo( - helpers.escape_gdbmi_stream("~", str(e) + "\n") - if helpers.is_gdbmi_mode() - else str(e) + "\n", - nl=False, - ) - - rebuild_prog = False - preload = debug_config.load_cmds == ["preload"] - load_mode = load_mode or debug_config.load_mode - if load_mode == "always": - rebuild_prog = preload or not helpers.has_debug_symbols( - debug_config.program_path - ) - elif load_mode == "modified": - rebuild_prog = helpers.is_prog_obsolete( - debug_config.program_path - ) or not helpers.has_debug_symbols(debug_config.program_path) - - if not (debug_config.program_path and os.path.isfile(debug_config.program_path)): - rebuild_prog = True - - if preload or (not rebuild_prog and load_mode != "always"): - 
# don't load firmware through debug server - debug_config.load_cmds = [] - - if rebuild_prog: - if helpers.is_gdbmi_mode(): - click.echo( - helpers.escape_gdbmi_stream( - "~", "Preparing firmware for debugging...\n" - ), - nl=False, - ) - stream = helpers.GDBMIConsoleStream() - with proc.capture_std_streams(stream): - helpers.predebug_project( - ctx, project_dir, project_config, env_name, preload, verbose - ) - stream.close() - else: - click.echo("Preparing firmware for debugging...") - helpers.predebug_project( - ctx, project_dir, project_config, env_name, preload, verbose - ) - - # save SHA sum of newly created prog - if load_mode == "modified": - helpers.is_prog_obsolete(debug_config.program_path) - - if not os.path.isfile(debug_config.program_path): - raise DebugInvalidOptionsError("Program/firmware is missed") - - loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop() - asyncio.set_event_loop(loop) - - with fs.cd(project_dir): - client = GDBClientProcess(project_dir, debug_config) - coro = client.run(__unprocessed) - try: - signal.signal(signal.SIGINT, signal.SIG_IGN) - loop.run_until_complete(coro) - if IS_WINDOWS: - # an issue with `asyncio` executor and STIDIN, - # it cannot be closed gracefully - proc.force_exit() - finally: - del client - loop.close() - - return True +from platformio.debug.command import debug_cmd as cli diff --git a/platformio/commands/device/__init__.py b/platformio/commands/device/__init__.py index bcee03cc..1af0f8d4 100644 --- a/platformio/commands/device/__init__.py +++ b/platformio/commands/device/__init__.py @@ -12,4 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from platformio.commands.device.filters.base import DeviceMonitorFilter +# pylint: disable=unused-import +from platformio.device.filters.base import ( + DeviceMonitorFilterBase as DeviceMonitorFilter, +) diff --git a/platformio/commands/device/command.py b/platformio/commands/device/command.py index fb7877e4..5865c43e 100644 --- a/platformio/commands/device/command.py +++ b/platformio/commands/device/command.py @@ -12,225 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json -import os -import sys -from fnmatch import fnmatch - import click -from serial.tools import miniterm -from platformio import exception, fs, util -from platformio.commands.device import helpers as device_helpers -from platformio.platform.factory import PlatformFactory -from platformio.project.exception import NotPlatformIOProjectError +from platformio.device.commands.list import device_list_cmd +from platformio.device.commands.monitor import device_monitor_cmd -@click.group(short_help="Device manager & serial/socket monitor") +@click.group( + "device", + commands=[ + device_list_cmd, + device_monitor_cmd, + ], + short_help="Device manager & Serial/Socket monitor", +) def cli(): pass - - -@cli.command("list", short_help="List devices") -@click.option("--serial", is_flag=True, help="List serial ports, default") -@click.option("--logical", is_flag=True, help="List logical devices") -@click.option("--mdns", is_flag=True, help="List multicast DNS services") -@click.option("--json-output", is_flag=True) -def device_list( # pylint: disable=too-many-branches - serial, logical, mdns, json_output -): - if not logical and not mdns: - serial = True - data = {} - if serial: - data["serial"] = util.get_serial_ports() - if logical: - data["logical"] = util.get_logical_devices() - if mdns: - data["mdns"] = util.get_mdns_services() - - single_key = list(data)[0] if len(list(data)) == 1 else None - - if json_output: - return click.echo(json.dumps(data[single_key] if single_key else data)) - - titles = { - "serial": "Serial Ports", - "logical": "Logical Devices", - "mdns": "Multicast DNS Services", - } - - for key, value in data.items(): - if not single_key: - click.secho(titles[key], bold=True) - click.echo("=" * len(titles[key])) - - if key == "serial": - for item in value: - click.secho(item["port"], fg="cyan") - click.echo("-" * len(item["port"])) - click.echo("Hardware ID: %s" % item["hwid"]) - click.echo("Description: %s" % item["description"]) - click.echo("") - - if key == "logical": - for item in value: - click.secho(item["path"], fg="cyan") - click.echo("-" * len(item["path"])) - click.echo("Name: %s" % item["name"]) - click.echo("") - - if key == "mdns": - for item in value: - click.secho(item["name"], fg="cyan") - click.echo("-" * len(item["name"])) - click.echo("Type: %s" % item["type"]) - click.echo("IP: %s" % item["ip"]) - click.echo("Port: %s" % item["port"]) - if item["properties"]: - click.echo( - "Properties: %s" - % ( - "; ".join( - [ - "%s=%s" % (k, v) - for k, v in item["properties"].items() - ] - ) - ) - ) - click.echo("") - - if single_key: - click.echo("") - - return True - - -@cli.command("monitor", short_help="Monitor device (Serial)") -@click.option("--port", "-p", help="Port, a number or a device name") -@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600") -@click.option( - "--parity", - default="N", - type=click.Choice(["N", "E", "O", "S", "M"]), - help="Set parity, default=N", -) -@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off") -@click.option( - "--xonxoff", is_flag=True, help="Enable software flow control, default=Off" -) -@click.option( - "--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state" -) -@click.option( - "--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state" -) -@click.option("--echo", is_flag=True, help="Enable local echo, default=Off") -@click.option( - "--encoding", - default="UTF-8", - help="Set the encoding for the serial port 
(e.g. hexlify, " - "Latin1, UTF-8), default: UTF-8", -) -@click.option("--filter", "-f", multiple=True, help="Add filters/text transformations") -@click.option( - "--eol", - default="CRLF", - type=click.Choice(["CR", "LF", "CRLF"]), - help="End of line mode, default=CRLF", -) -@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations") -@click.option( - "--exit-char", - type=int, - default=3, - help="ASCII code of special character that is used to exit " - "the application, default=3 (Ctrl+C)", -) -@click.option( - "--menu-char", - type=int, - default=20, - help="ASCII code of special character that is used to " - "control miniterm (menu), default=20 (DEC)", -) -@click.option( - "--quiet", - is_flag=True, - help="Diagnostics: suppress non-error messages, default=Off", -) -@click.option( - "-d", - "--project-dir", - default=os.getcwd, - type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), -) -@click.option( - "-e", - "--environment", - help="Load configuration from `platformio.ini` and specified environment", -) -def device_monitor(**kwargs): # pylint: disable=too-many-branches - project_options = {} - platform = None - try: - with fs.cd(kwargs["project_dir"]): - project_options = device_helpers.get_project_options(kwargs["environment"]) - kwargs = device_helpers.apply_project_monitor_options( - kwargs, project_options - ) - if "platform" in project_options: - platform = PlatformFactory.new(project_options["platform"]) - except NotPlatformIOProjectError: - pass - - with fs.cd(kwargs["project_dir"]): - device_helpers.register_filters(platform=platform, options=kwargs) - - if not kwargs["port"]: - ports = util.get_serial_ports(filter_hwid=True) - if len(ports) == 1: - kwargs["port"] = ports[0]["port"] - elif "platform" in project_options and "board" in project_options: - board_hwids = device_helpers.get_board_hwids( - kwargs["project_dir"], - platform, - project_options["board"], - ) - for item in ports: - for hwid in board_hwids: - hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "") - if hwid_str in item["hwid"]: - kwargs["port"] = item["port"] - break - if kwargs["port"]: - break - elif kwargs["port"] and (set(["*", "?", "[", "]"]) & set(kwargs["port"])): - for item in util.get_serial_ports(): - if fnmatch(item["port"], kwargs["port"]): - kwargs["port"] = item["port"] - break - - # override system argv with patched options - sys.argv = ["monitor"] + device_helpers.options_to_argv( - kwargs, - project_options, - ignore=("port", "baud", "rts", "dtr", "environment", "project_dir"), - ) - - if not kwargs["quiet"]: - click.echo( - "--- Available filters and text transformations: %s" - % ", ".join(sorted(miniterm.TRANSFORMATIONS.keys())) - ) - click.echo("--- More details at https://bit.ly/pio-monitor-filters") - try: - miniterm.main( - default_port=kwargs["port"], - default_baudrate=kwargs["baud"] or 9600, - default_rts=kwargs["rts"], - default_dtr=kwargs["dtr"], - ) - except Exception as e: - raise exception.MinitermException(e) diff --git a/platformio/commands/device/filters/base.py b/platformio/commands/device/filters/base.py deleted file mode 100644 index bf95352e..00000000 --- a/platformio/commands/device/filters/base.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) 2014-present PlatformIO -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from serial.tools import miniterm - -from platformio.project.config import ProjectConfig - - -class DeviceMonitorFilter(miniterm.Transform): - def __init__(self, options=None): - """Called by PlatformIO to pass context""" - miniterm.Transform.__init__(self) - - self.options = options or {} - self.project_dir = self.options.get("project_dir") - self.environment = self.options.get("environment") - - self.config = ProjectConfig.get_instance() - if not self.environment: - default_envs = self.config.default_envs() - if default_envs: - self.environment = default_envs[0] - elif self.config.envs(): - self.environment = self.config.envs()[0] - - def __call__(self): - """Called by the miniterm library when the filter is actually used""" - return self - - @property - def NAME(self): - raise NotImplementedError("Please declare NAME attribute for the filter class") diff --git a/platformio/commands/home/helpers.py b/platformio/commands/home/helpers.py index e7407eb9..494e2709 100644 --- a/platformio/commands/home/helpers.py +++ b/platformio/commands/home/helpers.py @@ -26,7 +26,7 @@ class AsyncSession(requests.Session): async def request( # pylint: disable=signature-differs,invalid-overridden-method self, *args, **kwargs ): - func = super(AsyncSession, self).request + func = super().request return await run_in_threadpool(func, *args, **kwargs) diff --git a/platformio/commands/home/rpc/handlers/os.py b/platformio/commands/home/rpc/handlers/os.py index f1042978..7342b669 100644 --- a/platformio/commands/home/rpc/handlers/os.py +++ b/platformio/commands/home/rpc/handlers/os.py @@ -22,10 +22,11 @@ from functools import cmp_to_key import click -from platformio import __default_requests_timeout__, fs, util +from platformio import __default_requests_timeout__, fs from platformio.cache import ContentCache from platformio.clients.http import ensure_internet_on from platformio.commands.home import helpers +from platformio.device.list import list_logical_devices class OSRPC: @@ -154,7 +155,7 @@ class OSRPC: @staticmethod def get_logical_devices(): items = [] - for item in util.get_logical_devices(): + for item in list_logical_devices(): if item["name"]: item["name"] = item["name"] items.append(item) diff --git a/platformio/commands/home/rpc/handlers/piocore.py b/platformio/commands/home/rpc/handlers/piocore.py index 52a1b126..01bbf90d 100644 --- a/platformio/commands/home/rpc/handlers/piocore.py +++ b/platformio/commands/home/rpc/handlers/piocore.py @@ -14,10 +14,11 @@ from __future__ import absolute_import +import io import json import os import sys -from io import StringIO +import threading import click from ajsonrpc.core import JSONRPC20DispatchException @@ -27,27 +28,22 @@ from platformio import __main__, __version__, fs, proc from platformio.commands.home import helpers from platformio.compat import get_locale_encoding, is_bytes -try: - from thread import get_ident as thread_get_ident -except ImportError: - from threading import get_ident as thread_get_ident - class MultiThreadingStdStream(object): def __init__(self, parent_stream): - self._buffers = {thread_get_ident(): parent_stream} + 
self._buffers = {threading.get_ident(): parent_stream} def __getattr__(self, name): - thread_id = thread_get_ident() + thread_id = threading.get_ident() self._ensure_thread_buffer(thread_id) return getattr(self._buffers[thread_id], name) def _ensure_thread_buffer(self, thread_id): if thread_id not in self._buffers: - self._buffers[thread_id] = StringIO() + self._buffers[thread_id] = io.StringIO() def write(self, value): - thread_id = thread_get_ident() + thread_id = threading.get_ident() self._ensure_thread_buffer(thread_id) return self._buffers[thread_id].write( value.decode() if is_bytes(value) else value diff --git a/platformio/commands/lib/command.py b/platformio/commands/lib/command.py index d29b7388..76210090 100644 --- a/platformio/commands/lib/command.py +++ b/platformio/commands/lib/command.py @@ -15,8 +15,10 @@ # pylint: disable=too-many-branches, too-many-locals import json +import logging import os import time +from urllib.parse import quote import click from tabulate import tabulate @@ -31,11 +33,6 @@ from platformio.proc import is_ci from platformio.project.config import ProjectConfig from platformio.project.helpers import get_project_dir, is_platformio_project -try: - from urllib.parse import quote -except ImportError: - from urllib import quote - CTX_META_INPUT_DIRS_KEY = __name__ + ".input_dirs" CTX_META_PROJECT_ENVIRONMENTS_KEY = __name__ + ".project_environments" CTX_META_STORAGE_DIRS_KEY = __name__ + ".storage_dirs" @@ -46,7 +43,7 @@ def get_project_global_lib_dir(): return ProjectConfig.get_instance().get("platformio", "globallib_dir") -@click.group(short_help="Library manager") +@click.group(short_help="Library manager", hidden=True) @click.option( "-d", "--storage-dir", @@ -152,16 +149,16 @@ def lib_install( # pylint: disable=too-many-arguments,unused-argument if not silent and (libraries or storage_dir in storage_libdeps): print_storage_header(storage_dirs, storage_dir) lm = LibraryPackageManager(storage_dir) + lm.set_log_level(logging.WARN if silent else logging.DEBUG) if libraries: installed_pkgs = { - library: lm.install(library, silent=silent, force=force) - for library in libraries + library: lm.install(library, force=force) for library in libraries } elif storage_dir in storage_libdeps: for library in storage_libdeps[storage_dir]: - lm.install(library, silent=silent, force=force) + lm.install(library, force=force) if save and installed_pkgs: _save_deps(ctx, installed_pkgs) @@ -212,9 +209,8 @@ def lib_uninstall(ctx, libraries, save, silent): for storage_dir in storage_dirs: print_storage_header(storage_dirs, storage_dir) lm = LibraryPackageManager(storage_dir) - uninstalled_pkgs = { - library: lm.uninstall(library, silent=silent) for library in libraries - } + lm.set_log_level(logging.WARN if silent else logging.DEBUG) + uninstalled_pkgs = {library: lm.uninstall(library) for library in libraries} if save and uninstalled_pkgs: _save_deps(ctx, uninstalled_pkgs, action="remove") @@ -237,14 +233,20 @@ def lib_uninstall(ctx, libraries, save, silent): def lib_update( # pylint: disable=too-many-arguments ctx, libraries, only_check, dry_run, silent, json_output ): - storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY] only_check = dry_run or only_check + if only_check and not json_output: + raise exception.UserSideException( + "This command is deprecated, please use `pio pkg outdated` instead" + ) + + storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY] json_result = {} for storage_dir in storage_dirs: if not json_output: print_storage_header(storage_dirs, storage_dir) 
lib_deps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, {}).get(storage_dir, []) lm = LibraryPackageManager(storage_dir) + lm.set_log_level(logging.WARN if silent else logging.DEBUG) _libraries = libraries or lib_deps or lm.get_installed() if only_check and json_output: @@ -277,9 +279,7 @@ def lib_update( # pylint: disable=too-many-arguments None if isinstance(library, PackageItem) else PackageSpec(library) ) try: - lm.update( - library, to_spec=to_spec, only_check=only_check, silent=silent - ) + lm.update(library, to_spec=to_spec) except UnknownPackageError as e: if library not in lib_deps: raise e @@ -438,7 +438,8 @@ def lib_builtin(storage, json_output): @click.option("--json-output", is_flag=True) def lib_show(library, json_output): lm = LibraryPackageManager() - lib_id = lm.reveal_registry_package_id(library, silent=json_output) + lm.set_log_level(logging.ERROR if json_output else logging.DEBUG) + lib_id = lm.reveal_registry_package_id(library) regclient = lm.get_registry_client_instance() lib = regclient.fetch_json_data( "get", "/v2/lib/info/%d" % lib_id, x_cache_valid="1h" @@ -457,7 +458,7 @@ def lib_show(library, json_output): "Version: %s, released %s" % ( lib["version"]["name"], - time.strftime("%c", util.parse_date(lib["version"]["released"])), + util.parse_datetime(lib["version"]["released"]).strftime("%c"), ) ) click.echo("Manifest: %s" % lib["confurl"]) @@ -465,9 +466,9 @@ def lib_show(library, json_output): if key not in lib or not lib[key]: continue if isinstance(lib[key], list): - click.echo("%s: %s" % (key.title(), ", ".join(lib[key]))) + click.echo("%s: %s" % (key.capitalize(), ", ".join(lib[key]))) else: - click.echo("%s: %s" % (key.title(), lib[key])) + click.echo("%s: %s" % (key.capitalize(), lib[key])) blocks = [] @@ -499,7 +500,7 @@ def lib_show(library, json_output): "Versions", [ "%s, released %s" - % (v["name"], time.strftime("%c", util.parse_date(v["released"]))) + % (v["name"], util.parse_datetime(v["released"]).strftime("%c")) for v in lib["versions"] ], ) @@ -529,7 +530,7 @@ def lib_show(library, json_output): @click.argument("config_url") def lib_register(config_url): # pylint: disable=unused-argument raise exception.UserSideException( - "This command is deprecated. Please use `pio package publish` command." + "This command is deprecated. Please use `pio pkg publish` command." 
) @@ -546,7 +547,7 @@ def lib_stats(json_output): tabular_data = [ ( click.style(item["name"], fg="cyan"), - time.strftime("%c", util.parse_date(item["date"])), + util.parse_datetime(item["date"]).strftime("%c"), "https://platformio.org/lib/show/%s/%s" % (item["id"], quote(item["name"])), ) @@ -621,9 +622,9 @@ def print_lib_item(item): if key not in item or not item[key]: continue if isinstance(item[key], list): - click.echo("%s: %s" % (key.title(), ", ".join(item[key]))) + click.echo("%s: %s" % (key.capitalize(), ", ".join(item[key]))) else: - click.echo("%s: %s" % (key.title(), item[key])) + click.echo("%s: %s" % (key.capitalize(), item[key])) for key in ("frameworks", "platforms"): if key not in item: diff --git a/platformio/commands/lib/helpers.py b/platformio/commands/lib/helpers.py index c720b10d..9b99cd6c 100644 --- a/platformio/commands/lib/helpers.py +++ b/platformio/commands/lib/helpers.py @@ -14,6 +14,7 @@ import os +from platformio import util from platformio.compat import ci_strings_are_equal from platformio.package.manager.platform import PlatformPackageManager from platformio.package.meta import PackageSpec @@ -22,6 +23,7 @@ from platformio.project.config import ProjectConfig from platformio.project.exception import InvalidProjectConfError +@util.memoized(expire="60s") def get_builtin_libs(storage_names=None): # pylint: disable=import-outside-toplevel from platformio.package.manager.library import LibraryPackageManager @@ -45,8 +47,8 @@ def get_builtin_libs(storage_names=None): return items -def is_builtin_lib(name, storages=None): - for storage in storages or get_builtin_libs(): +def is_builtin_lib(name): + for storage in get_builtin_libs(): for lib in storage["items"]: if lib.get("name") == name: return True diff --git a/platformio/commands/pkg.py b/platformio/commands/pkg.py new file mode 100644 index 00000000..b2df9b6a --- /dev/null +++ b/platformio/commands/pkg.py @@ -0,0 +1,48 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
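The helpers.py hunk above caches `get_builtin_libs()` with `@util.memoized(expire="60s")` so repeated `is_builtin_lib()` calls stop rescanning the storages. A rough, self-contained sketch of what such a time-based memoization decorator can look like; the real `util.memoized` accepts an expiry string such as `"60s"`, so the numeric-seconds parameter and the stub function below are simplifications:

```python
import functools
import time


def memoized(expire_seconds=60):
    """Cache a function's result per argument tuple and recompute it
    once the cached entry is older than expire_seconds."""

    def decorator(func):
        cache = {}

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))  # arguments must be hashable
            now = time.monotonic()
            if key in cache:
                result, stored_at = cache[key]
                if now - stored_at < expire_seconds:
                    return result
            result = func(*args, **kwargs)
            cache[key] = (result, now)
            return result

        return wrapper

    return decorator


@memoized(expire_seconds=60)
def get_builtin_libs_stub(storage_names=None):
    # stand-in for an expensive lookup such as scanning library storages
    return list(storage_names or [])


if __name__ == "__main__":
    print(get_builtin_libs_stub(("framework-arduino",)))  # computed
    print(get_builtin_libs_stub(("framework-arduino",)))  # served from the cache
```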
+ +import click + +from platformio.package.commands.exec import package_exec_cmd +from platformio.package.commands.install import package_install_cmd +from platformio.package.commands.list import package_list_cmd +from platformio.package.commands.outdated import package_outdated_cmd +from platformio.package.commands.pack import package_pack_cmd +from platformio.package.commands.publish import package_publish_cmd +from platformio.package.commands.search import package_search_cmd +from platformio.package.commands.show import package_show_cmd +from platformio.package.commands.uninstall import package_uninstall_cmd +from platformio.package.commands.unpublish import package_unpublish_cmd +from platformio.package.commands.update import package_update_cmd + + +@click.group( + "pkg", + commands=[ + package_exec_cmd, + package_install_cmd, + package_list_cmd, + package_outdated_cmd, + package_pack_cmd, + package_publish_cmd, + package_search_cmd, + package_show_cmd, + package_uninstall_cmd, + package_unpublish_cmd, + package_update_cmd, + ], + short_help="Unified Package Manager", +) +def cli(): + pass diff --git a/platformio/commands/platform.py b/platformio/commands/platform.py index 287f5760..b35f056c 100644 --- a/platformio/commands/platform.py +++ b/platformio/commands/platform.py @@ -13,12 +13,14 @@ # limitations under the License. import json +import logging import os import click -from platformio.cache import cleanup_content_cache from platformio.commands.boards import print_boards +from platformio.exception import UserSideException +from platformio.package.exception import UnknownPackageError from platformio.package.manager.platform import PlatformPackageManager from platformio.package.meta import PackageItem, PackageSpec from platformio.package.version import get_original_version @@ -26,7 +28,7 @@ from platformio.platform.exception import UnknownPlatform from platformio.platform.factory import PlatformFactory -@click.group(short_help="Platform manager") +@click.group(short_help="Platform manager", hidden=True) def cli(): pass @@ -178,7 +180,7 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches is_flag=True, help="Reinstall/redownload dev/platform and its packages if exist", ) -def platform_install( # pylint: disable=too-many-arguments +def platform_install( # pylint: disable=too-many-arguments,too-many-locals platforms, with_package, without_package, @@ -187,37 +189,50 @@ def platform_install( # pylint: disable=too-many-arguments silent, force, ): - return _platform_install( - platforms, - with_package, - without_package, - skip_default_package, - with_all_packages, - silent, - force, - ) + def _find_pkg_names(p, candidates): + result = [] + for candidate in candidates: + found = False + # lookup by package types + for _name, _opts in p.packages.items(): + if _opts.get("type") == candidate: + result.append(_name) + found = True + if ( + p.frameworks + and candidate.startswith("framework-") + and candidate[10:] in p.frameworks + ): + result.append(p.frameworks[candidate[10:]]["package"]) + found = True + if not found: + result.append(candidate) + return result - -def _platform_install( # pylint: disable=too-many-arguments - platforms, - with_package=None, - without_package=None, - skip_default_package=False, - with_all_packages=False, - silent=False, - force=False, -): pm = PlatformPackageManager() + pm.set_log_level(logging.WARN if silent else logging.DEBUG) for platform in platforms: - pkg = pm.install( - spec=platform, - with_packages=with_package or [], - 
without_packages=without_package or [], - skip_default_package=skip_default_package, - with_all_packages=with_all_packages, - silent=silent, - force=force, - ) + if with_package or without_package or with_all_packages: + pkg = pm.install(platform, skip_dependencies=True) + p = PlatformFactory.new(pkg) + if with_all_packages: + with_package = list(p.packages) + with_package = set(_find_pkg_names(p, with_package or [])) + without_package = set(_find_pkg_names(p, without_package or [])) + upkgs = with_package | without_package + ppkgs = set(p.packages) + if not upkgs.issubset(ppkgs): + raise UnknownPackageError(", ".join(upkgs - ppkgs)) + for name, options in p.packages.items(): + if name in without_package: + continue + if name in with_package or not ( + skip_default_package or options.get("optional", False) + ): + p.pm.install(p.get_package_spec(name), force=force) + else: + pkg = pm.install(platform, skip_dependencies=skip_default_package) + if pkg and not silent: click.secho( "The platform '%s' has been successfully installed!\n" @@ -231,6 +246,7 @@ def _platform_install( # pylint: disable=too-many-arguments @click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]") def platform_uninstall(platforms): pm = PlatformPackageManager() + pm.set_log_level(logging.DEBUG) for platform in platforms: if pm.uninstall(platform): click.secho( @@ -256,9 +272,15 @@ def platform_uninstall(platforms): @click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting") @click.option("--json-output", is_flag=True) def platform_update( # pylint: disable=too-many-locals, too-many-arguments - platforms, only_packages, only_check, dry_run, silent, json_output + platforms, only_check, dry_run, silent, json_output, **_ ): + if only_check and not json_output: + raise UserSideException( + "This command is deprecated, please use `pio pkg outdated` instead" + ) + pm = PlatformPackageManager() + pm.set_log_level(logging.WARN if silent else logging.DEBUG) platforms = platforms or pm.get_installed() only_check = dry_run or only_check @@ -290,9 +312,6 @@ def platform_update( # pylint: disable=too-many-locals, too-many-arguments result.append(data) return click.echo(json.dumps(result)) - # cleanup cached board and platform lists - cleanup_content_cache("http") - for platform in platforms: click.echo( "Platform %s" @@ -304,9 +323,7 @@ def platform_update( # pylint: disable=too-many-locals, too-many-arguments ) ) click.echo("--------") - pm.update( - platform, only_packages=only_packages, only_check=only_check, silent=silent - ) + pm.update(platform) click.echo() return True @@ -317,15 +334,6 @@ def platform_update( # pylint: disable=too-many-locals, too-many-arguments # -def init_platform(name, skip_default_package=True, auto_install=True): - try: - return PlatformFactory.new(name) - except UnknownPlatform: - if auto_install: - _platform_install([name], skip_default_package=skip_default_package) - return PlatformFactory.new(name) - - def _print_platforms(platforms): for platform in platforms: click.echo( diff --git a/platformio/commands/project.py b/platformio/commands/project.py index 22b06ee2..534bb72b 100644 --- a/platformio/commands/project.py +++ b/platformio/commands/project.py @@ -12,429 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
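The rewritten `platform_install` above resolves every user-supplied name through `_find_pkg_names()`: a candidate may match a declared package type, a `framework-<name>` alias, or be taken verbatim, and anything left over triggers `UnknownPackageError`. A standalone sketch of that resolution with plain dicts standing in for the platform object (the package and framework names are only examples):

```python
def find_pkg_names(packages, frameworks, candidates):
    """Map user-supplied candidates to concrete package names."""
    result = []
    for candidate in candidates:
        found = False
        # 1) lookup by declared package type
        for name, opts in packages.items():
            if opts.get("type") == candidate:
                result.append(name)
                found = True
        # 2) "framework-xxx" resolves to the package backing that framework
        if candidate.startswith("framework-") and candidate[10:] in frameworks:
            result.append(frameworks[candidate[10:]]["package"])
            found = True
        # 3) otherwise keep the candidate as-is
        if not found:
            result.append(candidate)
    return result


packages = {
    "toolchain-xtensa": {"type": "toolchain"},
    "framework-arduinoespressif8266": {},
}
frameworks = {"arduino": {"package": "framework-arduinoespressif8266"}}

print(find_pkg_names(packages, frameworks, ["toolchain", "framework-arduino"]))
# -> ['toolchain-xtensa', 'framework-arduinoespressif8266']
```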
-# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,line-too-long - -import json -import os - import click -from tabulate import tabulate -from platformio import fs -from platformio.commands.platform import platform_install as cli_platform_install -from platformio.package.manager.platform import PlatformPackageManager -from platformio.platform.exception import UnknownBoard -from platformio.project.config import ProjectConfig -from platformio.project.exception import NotPlatformIOProjectError -from platformio.project.generator import ProjectGenerator -from platformio.project.helpers import is_platformio_project, load_project_ide_data +from platformio.project.commands.config import project_config_cmd +from platformio.project.commands.init import project_init_cmd +from platformio.project.commands.metadata import project_metadata_cmd -@click.group(short_help="Project manager") +@click.group( + "project", + commands=[ + project_config_cmd, + project_init_cmd, + project_metadata_cmd, + ], + short_help="Project Manager", +) def cli(): pass - - -@cli.command("config", short_help="Show computed configuration") -@click.option( - "-d", - "--project-dir", - default=os.getcwd, - type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), -) -@click.option("--json-output", is_flag=True) -def project_config(project_dir, json_output): - if not is_platformio_project(project_dir): - raise NotPlatformIOProjectError(project_dir) - with fs.cd(project_dir): - config = ProjectConfig.get_instance() - if json_output: - return click.echo(config.to_json()) - click.echo( - "Computed project configuration for %s" % click.style(project_dir, fg="cyan") - ) - for section, options in config.as_tuple(): - click.secho(section, fg="cyan") - click.echo("-" * len(section)) - click.echo( - tabulate( - [ - (name, "=", "\n".join(value) if isinstance(value, list) else value) - for name, value in options - ], - tablefmt="plain", - ) - ) - click.echo() - return None - - -@cli.command("data", short_help="Dump data intended for IDE extensions/plugins") -@click.option( - "-d", - "--project-dir", - default=os.getcwd, - type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), -) -@click.option("-e", "--environment", multiple=True) -@click.option("--json-output", is_flag=True) -def project_data(project_dir, environment, json_output): - if not is_platformio_project(project_dir): - raise NotPlatformIOProjectError(project_dir) - with fs.cd(project_dir): - config = ProjectConfig.get_instance() - config.validate(environment) - environment = list(environment or config.envs()) - - if json_output: - return click.echo(json.dumps(load_project_ide_data(project_dir, environment))) - - for envname in environment: - click.echo("Environment: " + click.style(envname, fg="cyan", bold=True)) - click.echo("=" * (13 + len(envname))) - click.echo( - tabulate( - [ - (click.style(name, bold=True), "=", json.dumps(value, indent=2)) - for name, value in load_project_ide_data( - project_dir, envname - ).items() - ], - tablefmt="plain", - ) - ) - click.echo() - - return None - - -def validate_boards(ctx, param, value): # pylint: disable=W0613 - pm = PlatformPackageManager() - for id_ in value: - try: - pm.board_config(id_) - except UnknownBoard: - raise click.BadParameter( - "`%s`. 
Please search for board ID using `platformio boards` " - "command" % id_ - ) - return value - - -@cli.command("init", short_help="Initialize a project or update existing") -@click.option( - "--project-dir", - "-d", - default=os.getcwd, - type=click.Path( - exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True - ), -) -@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards) -@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ides())) -@click.option("-e", "--environment", help="Update using existing environment") -@click.option("-O", "--project-option", multiple=True) -@click.option("--env-prefix", default="") -@click.option("-s", "--silent", is_flag=True) -@click.pass_context -def project_init( - ctx, # pylint: disable=R0913 - project_dir, - board, - ide, - environment, - project_option, - env_prefix, - silent, -): - if not silent: - if project_dir == os.getcwd(): - click.secho("\nThe current working directory ", fg="yellow", nl=False) - try: - click.secho(project_dir, fg="cyan", nl=False) - except UnicodeEncodeError: - click.secho(json.dumps(project_dir), fg="cyan", nl=False) - click.secho(" will be used for the project.", fg="yellow") - click.echo("") - - click.echo("The next files/directories have been created in ", nl=False) - try: - click.secho(project_dir, fg="cyan") - except UnicodeEncodeError: - click.secho(json.dumps(project_dir), fg="cyan") - click.echo( - "%s - Put project header files here" % click.style("include", fg="cyan") - ) - click.echo( - "%s - Put here project specific (private) libraries" - % click.style("lib", fg="cyan") - ) - click.echo("%s - Put project source files here" % click.style("src", fg="cyan")) - click.echo( - "%s - Project Configuration File" % click.style("platformio.ini", fg="cyan") - ) - - is_new_project = not is_platformio_project(project_dir) - if is_new_project: - init_base_project(project_dir) - - if environment: - update_project_env(project_dir, environment, project_option) - elif board: - update_board_envs( - ctx, project_dir, board, project_option, env_prefix, ide is not None - ) - - if ide: - with fs.cd(project_dir): - config = ProjectConfig.get_instance( - os.path.join(project_dir, "platformio.ini") - ) - config.validate() - ProjectGenerator(config, environment, ide, board).generate() - - if is_new_project: - init_cvs_ignore(project_dir) - - if silent: - return - - if ide: - click.secho( - "\nProject has been successfully %s including configuration files " - "for `%s` IDE." % ("initialized" if is_new_project else "updated", ide), - fg="green", - ) - else: - click.secho( - "\nProject has been successfully %s! 
Useful commands:\n" - "`pio run` - process/build project from the current directory\n" - "`pio run --target upload` or `pio run -t upload` " - "- upload firmware to a target\n" - "`pio run --target clean` - clean project (remove compiled files)" - "\n`pio run --help` - additional information" - % ("initialized" if is_new_project else "updated"), - fg="green", - ) - - -def init_base_project(project_dir): - with fs.cd(project_dir): - config = ProjectConfig() - config.save() - dir_to_readme = [ - (config.get("platformio", "src_dir"), None), - (config.get("platformio", "include_dir"), init_include_readme), - (config.get("platformio", "lib_dir"), init_lib_readme), - (config.get("platformio", "test_dir"), init_test_readme), - ] - for (path, cb) in dir_to_readme: - if os.path.isdir(path): - continue - os.makedirs(path) - if cb: - cb(path) - - -def init_include_readme(include_dir): - with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp: - fp.write( - """ -This directory is intended for project header files. - -A header file is a file containing C declarations and macro definitions -to be shared between several project source files. You request the use of a -header file in your project source file (C, C++, etc) located in `src` folder -by including it, with the C preprocessing directive `#include'. - -```src/main.c - -#include "header.h" - -int main (void) -{ - ... -} -``` - -Including a header file produces the same results as copying the header file -into each source file that needs it. Such copying would be time-consuming -and error-prone. With a header file, the related declarations appear -in only one place. If they need to be changed, they can be changed in one -place, and programs that include the header file will automatically use the -new version when next recompiled. The header file eliminates the labor of -finding and changing all the copies as well as the risk that a failure to -find one copy will result in inconsistencies within a program. - -In C, the usual convention is to give header files names that end with `.h'. -It is most portable to use only letters, digits, dashes, and underscores in -header file names, and at most one dot. - -Read more about using header files in official GCC documentation: - -* Include Syntax -* Include Operation -* Once-Only Headers -* Computed Includes - -https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html -""", - ) - - -def init_lib_readme(lib_dir): - with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp: - fp.write( - """ -This directory is intended for project specific (private) libraries. -PlatformIO will compile them to static libraries and link into executable file. - -The source code of each library should be placed in a an own separate directory -("lib/your_library_name/[here are source files]"). - -For example, see a structure of the following two libraries `Foo` and `Bar`: - -|--lib -| | -| |--Bar -| | |--docs -| | |--examples -| | |--src -| | |- Bar.c -| | |- Bar.h -| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html -| | -| |--Foo -| | |- Foo.c -| | |- Foo.h -| | -| |- README --> THIS FILE -| -|- platformio.ini -|--src - |- main.c - -and a contents of `src/main.c`: -``` -#include -#include - -int main (void) -{ - ... -} - -``` - -PlatformIO Library Dependency Finder will find automatically dependent -libraries scanning project source files. 
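The removed `init_base_project()` above seeds a fresh project: every standard folder that is missing gets created, and the ones that need documentation get a README written by a callback. A condensed sketch of that create-folder-plus-optional-README flow; the folder names are hard-coded here instead of being read from `ProjectConfig`, and the README texts are shortened placeholders:

```python
import os


def init_base_project(project_dir):
    dir_to_readme = [
        (os.path.join(project_dir, "src"), None),
        (os.path.join(project_dir, "include"), "Project header files go here.\n"),
        (os.path.join(project_dir, "lib"), "Project-specific (private) libraries go here.\n"),
        (os.path.join(project_dir, "test"), "PlatformIO unit tests go here.\n"),
    ]
    for path, readme_text in dir_to_readme:
        if os.path.isdir(path):
            continue  # never overwrite an existing folder
        os.makedirs(path)
        if readme_text:
            with open(os.path.join(path, "README"), mode="w", encoding="utf8") as fp:
                fp.write(readme_text)


if __name__ == "__main__":
    init_base_project("demo_project")
```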
- -More information about PlatformIO Library Dependency Finder -- https://docs.platformio.org/page/librarymanager/ldf.html -""", - ) - - -def init_test_readme(test_dir): - with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp: - fp.write( - """ -This directory is intended for PlatformIO Unit Testing and project tests. - -Unit Testing is a software testing method by which individual units of -source code, sets of one or more MCU program modules together with associated -control data, usage procedures, and operating procedures, are tested to -determine whether they are fit for use. Unit testing finds problems early -in the development cycle. - -More information about PlatformIO Unit Testing: -- https://docs.platformio.org/page/plus/unit-testing.html -""", - ) - - -def init_cvs_ignore(project_dir): - conf_path = os.path.join(project_dir, ".gitignore") - if os.path.isfile(conf_path): - return - with open(conf_path, mode="w", encoding="utf8") as fp: - fp.write(".pio\n") - - -def update_board_envs( - ctx, project_dir, board_ids, project_option, env_prefix, force_download -): - config = ProjectConfig( - os.path.join(project_dir, "platformio.ini"), parse_extra=False - ) - used_boards = [] - for section in config.sections(): - cond = [section.startswith("env:"), config.has_option(section, "board")] - if all(cond): - used_boards.append(config.get(section, "board")) - - pm = PlatformPackageManager() - used_platforms = [] - modified = False - for id_ in board_ids: - board_config = pm.board_config(id_) - used_platforms.append(board_config["platform"]) - if id_ in used_boards: - continue - used_boards.append(id_) - modified = True - - envopts = {"platform": board_config["platform"], "board": id_} - # find default framework for board - frameworks = board_config.get("frameworks") - if frameworks: - envopts["framework"] = frameworks[0] - - for item in project_option: - if "=" not in item: - continue - _name, _value = item.split("=", 1) - envopts[_name.strip()] = _value.strip() - - section = "env:%s%s" % (env_prefix, id_) - config.add_section(section) - - for option, value in envopts.items(): - config.set(section, option, value) - - if force_download and used_platforms: - _install_dependent_platforms(ctx, used_platforms) - - if modified: - config.save() - - -def _install_dependent_platforms(ctx, platforms): - installed_platforms = [ - pkg.metadata.name for pkg in PlatformPackageManager().get_installed() - ] - if set(platforms) <= set(installed_platforms): - return - ctx.invoke( - cli_platform_install, platforms=list(set(platforms) - set(installed_platforms)) - ) - - -def update_project_env(project_dir, environment, project_option): - if not project_option: - return - config = ProjectConfig( - os.path.join(project_dir, "platformio.ini"), parse_extra=False - ) - - section = "env:%s" % environment - if not config.has_section(section): - config.add_section(section) - - for item in project_option: - if "=" not in item: - continue - _name, _value = item.split("=", 1) - config.set(section, _name.strip(), _value.strip()) - - config.save() diff --git a/platformio/commands/remote/ac/psync.py b/platformio/commands/remote/ac/psync.py index 6773615c..87789ed8 100644 --- a/platformio/commands/remote/ac/psync.py +++ b/platformio/commands/remote/ac/psync.py @@ -25,7 +25,7 @@ class ProjectSyncAsyncCmd(AsyncCommandBase): def __init__(self, *args, **kwargs): self.psync = None self._upstream = None - super(ProjectSyncAsyncCmd, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def 
start(self): project_dir = os.path.join( diff --git a/platformio/commands/remote/client/agent_service.py b/platformio/commands/remote/client/agent_service.py index 01af44da..b24f605d 100644 --- a/platformio/commands/remote/client/agent_service.py +++ b/platformio/commands/remote/client/agent_service.py @@ -17,11 +17,12 @@ import os from twisted.logger import LogLevel # pylint: disable=import-error from twisted.spread import pb # pylint: disable=import-error -from platformio import proc, util +from platformio import proc from platformio.commands.remote.ac.process import ProcessAsyncCmd from platformio.commands.remote.ac.psync import ProjectSyncAsyncCmd from platformio.commands.remote.ac.serial import SerialPortAsyncCmd from platformio.commands.remote.client.base import RemoteClientBase +from platformio.device.list import list_serial_ports from platformio.project.config import ProjectConfig from platformio.project.exception import NotPlatformIOProjectError @@ -84,11 +85,11 @@ class RemoteAgentService(RemoteClientBase): return (self.id, ac.id) def _process_cmd_device_list(self, _): - return (self.name, util.get_serialports()) + return (self.name, list_serial_ports()) def _process_cmd_device_monitor(self, options): if not options["port"]: - for item in util.get_serialports(): + for item in list_serial_ports(): if "VID:PID" in item["hwid"]: options["port"] = item["port"] break diff --git a/platformio/commands/remote/command.py b/platformio/commands/remote/command.py index 248d66d2..5f97d983 100644 --- a/platformio/commands/remote/command.py +++ b/platformio/commands/remote/command.py @@ -24,15 +24,20 @@ from time import sleep import click from platformio import fs, proc -from platformio.commands.device import helpers as device_helpers -from platformio.commands.device.command import device_monitor as cmd_device_monitor from platformio.commands.run.command import cli as cmd_run -from platformio.commands.test.command import cli as cmd_test +from platformio.device.commands.monitor import ( + apply_project_monitor_options, + device_monitor_cmd, + get_project_options, + project_options_to_monitor_argv, +) from platformio.package.manager.core import inject_contrib_pysite from platformio.project.exception import NotPlatformIOProjectError +from platformio.project.options import ProjectOptions +from platformio.test.command import test_cmd -@click.group("remote", short_help="Remote development") +@click.group("remote", short_help="Remote Development") @click.option("-a", "--agent", multiple=True) @click.pass_context def cli(ctx, agent): @@ -163,7 +168,20 @@ def remote_run( @cli.command("test", short_help="Remote Unit Testing") @click.option("--environment", "-e", multiple=True, metavar="") -@click.option("--ignore", "-i", multiple=True, metavar="") +@click.option( + "--filter", + "-f", + multiple=True, + metavar="", + help="Filter tests by a pattern", +) +@click.option( + "--ignore", + "-i", + multiple=True, + metavar="", + help="Ignore tests by a pattern", +) @click.option("--upload-port") @click.option("--test-port") @click.option( @@ -180,10 +198,11 @@ def remote_run( @click.option("--verbose", "-v", is_flag=True) @click.pass_obj @click.pass_context -def remote_test( +def remote_test( # pylint: disable=redefined-builtin ctx, agents, environment, + filter, ignore, upload_port, test_port, @@ -201,6 +220,7 @@ def remote_test( agents, dict( environment=environment, + filter=filter, ignore=ignore, upload_port=upload_port, test_port=test_port, @@ -217,8 +237,9 @@ def remote_test( click.secho("Building 
project locally", bold=True) ctx.invoke( - cmd_test, + test_cmd, environment=environment, + filter=filter, ignore=ignore, project_dir=project_dir, without_uploading=True, @@ -249,7 +270,12 @@ def device_list(agents, json_output): @remote_device.command("monitor", short_help="Monitor remote device") @click.option("--port", "-p", help="Port, a number or a device name") -@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600") +@click.option( + "--baud", + "-b", + type=int, + help="Set baud rate, default=%d" % ProjectOptions["env.monitor_speed"].default, +) @click.option( "--parity", default="N", @@ -328,19 +354,19 @@ def device_monitor(ctx, agents, **kwargs): project_options = {} try: with fs.cd(kwargs["project_dir"]): - project_options = device_helpers.get_project_options(kwargs["environment"]) - kwargs = device_helpers.apply_project_monitor_options(kwargs, project_options) + project_options = get_project_options(kwargs["environment"]) + kwargs = apply_project_monitor_options(kwargs, project_options) except NotPlatformIOProjectError: pass - kwargs["baud"] = kwargs["baud"] or 9600 + kwargs["baud"] = kwargs["baud"] or ProjectOptions["env.monitor_speed"].default def _tx_target(sock_dir): subcmd_argv = ["remote"] for agent in agents: subcmd_argv.extend(["--agent", agent]) subcmd_argv.extend(["device", "monitor"]) - subcmd_argv.extend(device_helpers.options_to_argv(kwargs, project_options)) + subcmd_argv.extend(project_options_to_monitor_argv(kwargs, project_options)) subcmd_argv.extend(["--sock", sock_dir]) subprocess.call([proc.where_is_program("platformio")] + subcmd_argv) @@ -355,7 +381,7 @@ def device_monitor(ctx, agents, **kwargs): return with open(sock_file, encoding="utf8") as fp: kwargs["port"] = fp.read() - ctx.invoke(cmd_device_monitor, **kwargs) + ctx.invoke(device_monitor_cmd, **kwargs) t.join(2) finally: fs.rmtree(sock_dir) diff --git a/platformio/commands/remote/factory/ssl.py b/platformio/commands/remote/factory/ssl.py index a4233a69..78aa5d79 100644 --- a/platformio/commands/remote/factory/ssl.py +++ b/platformio/commands/remote/factory/ssl.py @@ -23,7 +23,7 @@ class SSLContextFactory(ssl.ClientContextFactory): self.certificate_verified = False def getContext(self): - ctx = super(SSLContextFactory, self).getContext() + ctx = super().getContext() ctx.set_verify( SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, self.verifyHostname ) diff --git a/platformio/commands/run/command.py b/platformio/commands/run/command.py index 85fddd72..16d71bc9 100644 --- a/platformio/commands/run/command.py +++ b/platformio/commands/run/command.py @@ -22,12 +22,12 @@ import click from tabulate import tabulate from platformio import app, exception, fs, util -from platformio.commands.device.command import device_monitor as cmd_device_monitor from platformio.commands.run.helpers import clean_build_dir, handle_legacy_libdeps from platformio.commands.run.processor import EnvironmentProcessor -from platformio.commands.test.processor import CTX_META_TEST_IS_RUNNING +from platformio.device.commands.monitor import device_monitor_cmd from platformio.project.config import ProjectConfig -from platformio.project.helpers import find_project_dir_above, load_project_ide_data +from platformio.project.helpers import find_project_dir_above, load_build_metadata +from platformio.test.runners.base import CTX_META_TEST_IS_RUNNING # pylint: disable=too-many-arguments,too-many-locals,too-many-branches @@ -66,10 +66,17 @@ except NotImplementedError: "Default is a number of CPUs in a system (N=%d)" % 
DEFAULT_JOB_NUMS ), ) -@click.option("-s", "--silent", is_flag=True) -@click.option("-v", "--verbose", is_flag=True) +@click.option( + "-a", + "--program-arg", + "program_args", + multiple=True, + help="A program argument (multiple are allowed)", +) @click.option("--disable-auto-clean", is_flag=True) @click.option("--list-targets", is_flag=True) +@click.option("-s", "--silent", is_flag=True) +@click.option("-v", "--verbose", is_flag=True) @click.pass_context def cli( ctx, @@ -79,10 +86,11 @@ def cli( project_dir, project_conf, jobs, - silent, - verbose, + program_args, disable_auto_clean, list_targets, + silent, + verbose, ): app.set_session_var("custom_project_conf", project_conf) @@ -92,6 +100,7 @@ def cli( is_test_running = CTX_META_TEST_IS_RUNNING in ctx.meta + results = [] with fs.cd(project_dir): config = ProjectConfig.get_instance(project_conf) config.validate(environment) @@ -114,7 +123,6 @@ def cli( handle_legacy_libdeps(project_dir, config) default_envs = config.default_envs() - results = [] for env in config.envs(): skipenv = any( [ @@ -138,21 +146,25 @@ def cli( environment, target, upload_port, + jobs, + program_args, + is_test_running, silent, verbose, - jobs, - is_test_running, ) ) - command_failed = any(r.get("succeeded") is False for r in results) + command_failed = any(r.get("succeeded") is False for r in results) - if not is_test_running and (command_failed or not silent) and len(results) > 1: - print_processing_summary(results, verbose) + if not is_test_running and (command_failed or not silent) and len(results) > 1: + print_processing_summary(results, verbose) - if command_failed: - raise exception.ReturnErrorCode(1) - return True + # Reset custom project config + app.set_session_var("custom_project_conf", None) + + if command_failed: + raise exception.ReturnErrorCode(1) + return True def process_env( @@ -162,16 +174,25 @@ def process_env( environments, targets, upload_port, + jobs, + program_args, + is_test_running, silent, verbose, - jobs, - is_test_running, ): if not is_test_running and not silent: print_processing_header(name, config, verbose) ep = EnvironmentProcessor( - ctx, name, config, targets, upload_port, silent, verbose, jobs + ctx, + name, + config, + targets, + upload_port, + jobs, + program_args, + silent, + verbose, ) result = {"env": name, "duration": time(), "succeeded": ep.process()} result["duration"] = time() - result["duration"] @@ -186,7 +207,7 @@ def process_env( and "nobuild" not in ep.get_build_targets() ): ctx.invoke( - cmd_device_monitor, environment=environments[0] if environments else None + device_monitor_cmd, environment=environments[0] if environments else None ) return result @@ -273,7 +294,7 @@ def print_processing_summary(results, verbose=False): def print_target_list(envs): tabular_data = [] - for env, data in load_project_ide_data(os.getcwd(), envs).items(): + for env, data in load_build_metadata(os.getcwd(), envs).items(): tabular_data.extend( sorted( [ diff --git a/platformio/commands/run/processor.py b/platformio/commands/run/processor.py index 191a071f..4751d352 100644 --- a/platformio/commands/run/processor.py +++ b/platformio/commands/run/processor.py @@ -12,29 +12,44 @@ # See the License for the specific language governing permissions and # limitations under the License. 
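The run command hunk above adds a repeatable `-a/--program-arg` option and later hands the collected values to the environment processor as `program_args`. A minimal, self-contained Click example of how such a `multiple=True` option accumulates values (the command body is illustrative):

```python
import click


@click.command()
@click.option(
    "-a",
    "--program-arg",
    "program_args",
    multiple=True,
    help="A program argument (multiple are allowed)",
)
def run(program_args):
    # multiple=True makes Click collect every repeated flag into a tuple
    click.echo("program args: %s" % (list(program_args),))


if __name__ == "__main__":
    run()
```

Running the script with `-a foo -a bar` prints `program args: ['foo', 'bar']`.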
-from platformio.commands.platform import init_platform -from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME +from platformio.package.commands.install import install_project_env_dependencies +from platformio.platform.factory import PlatformFactory from platformio.project.exception import UndefinedEnvPlatformError +from platformio.test.runners.base import CTX_META_TEST_RUNNING_NAME # pylint: disable=too-many-instance-attributes class EnvironmentProcessor(object): def __init__( # pylint: disable=too-many-arguments - self, cmd_ctx, name, config, targets, upload_port, silent, verbose, jobs + self, + cmd_ctx, + name, + config, + targets, + upload_port, + jobs, + program_args, + silent, + verbose, ): self.cmd_ctx = cmd_ctx self.name = name self.config = config self.targets = [str(t) for t in targets] self.upload_port = upload_port + self.jobs = jobs + self.program_args = program_args self.silent = silent self.verbose = verbose - self.jobs = jobs self.options = config.items(env=name, as_dict=True) def get_build_variables(self): - variables = {"pioenv": self.name, "project_config": self.config.path} + variables = dict( + pioenv=self.name, + project_config=self.config.path, + program_args=self.program_args, + ) if CTX_META_TEST_RUNNING_NAME in self.cmd_ctx.meta: variables["piotest_running_name"] = self.cmd_ctx.meta[ @@ -64,7 +79,16 @@ class EnvironmentProcessor(object): if "monitor" in build_targets: build_targets.remove("monitor") - result = init_platform(self.options["platform"]).run( + if "clean" not in build_targets: + install_project_env_dependencies( + self.name, + { + "project_targets": build_targets, + "piotest_running_name": build_vars.get("piotest_running_name"), + }, + ) + + result = PlatformFactory.new(self.options["platform"], autoinstall=True).run( build_vars, build_targets, self.silent, self.verbose, self.jobs ) return result["returncode"] == 0 diff --git a/platformio/commands/test/helpers.py b/platformio/commands/test.py similarity index 59% rename from platformio/commands/test/helpers.py rename to platformio/commands/test.py index e490ea7c..df641161 100644 --- a/platformio/commands/test/helpers.py +++ b/platformio/commands/test.py @@ -12,19 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os +# pylint: disable=unused-import -from platformio import exception - - -def get_test_names(config): - test_dir = config.get("platformio", "test_dir") - if not os.path.isdir(test_dir): - raise exception.TestDirNotExists(test_dir) - names = [] - for item in sorted(os.listdir(test_dir)): - if os.path.isdir(os.path.join(test_dir, item)): - names.append(item) - if not names: - names = ["*"] - return names +from platformio.test.command import test_cmd as cli diff --git a/platformio/commands/test/command.py b/platformio/commands/test/command.py deleted file mode 100644 index c4b90499..00000000 --- a/platformio/commands/test/command.py +++ /dev/null @@ -1,266 +0,0 @@ -# Copyright (c) 2014-present PlatformIO -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# pylint: disable=too-many-arguments, too-many-locals, too-many-branches - -import fnmatch -import os -import shutil -from time import time - -import click -from tabulate import tabulate - -from platformio import app, exception, fs, util -from platformio.commands.platform import init_platform -from platformio.commands.test.embedded import EmbeddedTestProcessor -from platformio.commands.test.helpers import get_test_names -from platformio.commands.test.native import NativeTestProcessor -from platformio.project.config import ProjectConfig - - -@click.command("test", short_help="Unit testing") -@click.option("--environment", "-e", multiple=True, metavar="") -@click.option( - "--filter", - "-f", - multiple=True, - metavar="", - help="Filter tests by a pattern", -) -@click.option( - "--ignore", - "-i", - multiple=True, - metavar="", - help="Ignore tests by a pattern", -) -@click.option("--upload-port") -@click.option("--test-port") -@click.option( - "-d", - "--project-dir", - default=os.getcwd, - type=click.Path( - exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True - ), -) -@click.option( - "-c", - "--project-conf", - type=click.Path( - exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True - ), -) -@click.option("--without-building", is_flag=True) -@click.option("--without-uploading", is_flag=True) -@click.option("--without-testing", is_flag=True) -@click.option("--no-reset", is_flag=True) -@click.option( - "--monitor-rts", - default=None, - type=click.IntRange(0, 1), - help="Set initial RTS line state for Serial Monitor", -) -@click.option( - "--monitor-dtr", - default=None, - type=click.IntRange(0, 1), - help="Set initial DTR line state for Serial Monitor", -) -@click.option("--verbose", "-v", is_flag=True) -@click.pass_context -def cli( # pylint: disable=redefined-builtin - ctx, - environment, - ignore, - filter, - upload_port, - test_port, - project_dir, - project_conf, - without_building, - without_uploading, - without_testing, - no_reset, - monitor_rts, - monitor_dtr, - verbose, -): - app.set_session_var("custom_project_conf", project_conf) - - with fs.cd(project_dir): - config = ProjectConfig.get_instance(project_conf) - config.validate(envs=environment) - test_names = get_test_names(config) - - if not verbose: - click.echo("Verbose mode can be enabled via `-v, --verbose` option") - click.secho("Collected %d items" % len(test_names), bold=True) - - results = [] - default_envs = config.default_envs() - for testname in test_names: - - for envname in config.envs(): - section = "env:%s" % envname - - # filter and ignore patterns - patterns = dict(filter=list(filter), ignore=list(ignore)) - for key in patterns: - patterns[key].extend(config.get(section, "test_%s" % key, [])) - - skip_conditions = [ - environment and envname not in environment, - not environment and default_envs and envname not in default_envs, - testname != "*" - and patterns["filter"] - and not any( - fnmatch.fnmatch(testname, p) for p in patterns["filter"] - ), - testname != "*" - and any(fnmatch.fnmatch(testname, p) for p in patterns["ignore"]), - ] - if any(skip_conditions): - results.append({"env": envname, "test": testname}) - continue - - click.echo() - print_processing_header(testname, envname) - - cls = ( - EmbeddedTestProcessor - if config.get(section, "platform") - and init_platform(config.get(section, "platform")).is_embedded() - else NativeTestProcessor - ) 
- tp = cls( - ctx, - testname, - envname, - dict( - project_config=config, - project_dir=project_dir, - upload_port=upload_port, - test_port=test_port, - without_building=without_building, - without_uploading=without_uploading, - without_testing=without_testing, - no_reset=no_reset, - monitor_rts=monitor_rts, - monitor_dtr=monitor_dtr, - verbose=verbose, - silent=not verbose, - ), - ) - result = { - "env": envname, - "test": testname, - "duration": time(), - "succeeded": tp.process(), - } - result["duration"] = time() - result["duration"] - results.append(result) - - print_processing_footer(result) - - if without_testing: - return - - print_testing_summary(results, verbose) - - command_failed = any(r.get("succeeded") is False for r in results) - if command_failed: - raise exception.ReturnErrorCode(1) - - -def print_processing_header(test, env): - click.echo( - "Processing %s in %s environment" - % ( - click.style(test, fg="yellow", bold=True), - click.style(env, fg="cyan", bold=True), - ) - ) - terminal_width, _ = shutil.get_terminal_size() - click.secho("-" * terminal_width, bold=True) - - -def print_processing_footer(result): - is_failed = not result.get("succeeded") - util.print_labeled_bar( - "[%s] Took %.2f seconds" - % ( - ( - click.style("FAILED", fg="red", bold=True) - if is_failed - else click.style("PASSED", fg="green", bold=True) - ), - result["duration"], - ), - is_error=is_failed, - ) - - -def print_testing_summary(results, verbose=False): - click.echo() - - tabular_data = [] - succeeded_nums = 0 - failed_nums = 0 - duration = 0 - - for result in results: - duration += result.get("duration", 0) - if result.get("succeeded") is False: - failed_nums += 1 - status_str = click.style("FAILED", fg="red") - elif result.get("succeeded") is None: - if not verbose: - continue - status_str = "IGNORED" - else: - succeeded_nums += 1 - status_str = click.style("PASSED", fg="green") - - tabular_data.append( - ( - result["test"], - click.style(result["env"], fg="cyan"), - status_str, - util.humanize_duration_time(result.get("duration")), - ) - ) - - click.echo( - tabulate( - tabular_data, - headers=[ - click.style(s, bold=True) - for s in ("Test", "Environment", "Status", "Duration") - ], - ), - err=failed_nums, - ) - - util.print_labeled_bar( - "%s%d succeeded in %s" - % ( - "%d failed, " % failed_nums if failed_nums else "", - succeeded_nums, - util.humanize_duration_time(duration), - ), - is_error=failed_nums, - fg="red" if failed_nums else "green", - ) diff --git a/platformio/commands/test/embedded.py b/platformio/commands/test/embedded.py deleted file mode 100644 index d0b53390..00000000 --- a/platformio/commands/test/embedded.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) 2014-present PlatformIO -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
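The removed `print_testing_summary()` above renders per-test results with `tabulate` and `click.style`. A trimmed, standalone sketch of that summary table (the sample results are made up):

```python
import click
from tabulate import tabulate


def print_summary(results):
    rows = []
    for result in results:
        status = (
            click.style("PASSED", fg="green")
            if result["succeeded"]
            else click.style("FAILED", fg="red")
        )
        rows.append((result["test"], click.style(result["env"], fg="cyan"), status))
    click.echo(tabulate(rows, headers=["Test", "Environment", "Status"]))


if __name__ == "__main__":
    print_summary(
        [
            {"test": "test_calculator", "env": "uno", "succeeded": True},
            {"test": "test_calculator", "env": "native", "succeeded": False},
        ]
    )
```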
- -from time import sleep - -import click -import serial - -from platformio import exception, util -from platformio.commands.test.processor import TestProcessorBase -from platformio.platform.factory import PlatformFactory - - -class EmbeddedTestProcessor(TestProcessorBase): - - SERIAL_TIMEOUT = 600 - - def process(self): - if not self.options["without_building"]: - self.print_progress("Building...") - target = ["__test"] - if self.options["without_uploading"]: - target.append("checkprogsize") - if not self.build_or_upload(target): - return False - - if not self.options["without_uploading"]: - self.print_progress("Uploading...") - target = ["upload"] - if self.options["without_building"]: - target.append("nobuild") - else: - target.append("__test") - if not self.build_or_upload(target): - return False - - if self.options["without_testing"]: - return True - - self.print_progress("Testing...") - return self.run() - - def run(self): - click.echo( - "If you don't see any output for the first 10 secs, " - "please reset board (press reset button)" - ) - click.echo() - - try: - ser = serial.Serial( - baudrate=self.get_baudrate(), timeout=self.SERIAL_TIMEOUT - ) - ser.port = self.get_test_port() - ser.rts = self.options["monitor_rts"] - ser.dtr = self.options["monitor_dtr"] - ser.open() - except serial.SerialException as e: - click.secho(str(e), fg="red", err=True) - return False - - if not self.options["no_reset"]: - ser.flushInput() - ser.setDTR(False) - ser.setRTS(False) - sleep(0.1) - ser.setDTR(True) - ser.setRTS(True) - sleep(0.1) - - while True: - line = ser.readline().strip() - - # fix non-ascii output from device - for i, c in enumerate(line[::-1]): - if not isinstance(c, int): - c = ord(c) - if c > 127: - line = line[-i:] - break - - if not line: - continue - if isinstance(line, bytes): - line = line.decode("utf8", "ignore") - self.on_run_out(line) - if all(l in line for l in ("Tests", "Failures", "Ignored")): - break - ser.close() - return not self._run_failed - - def get_test_port(self): - # if test port is specified manually or in config - if self.options.get("test_port"): - return self.options.get("test_port") - if self.env_options.get("test_port"): - return self.env_options.get("test_port") - - assert set(["platform", "board"]) & set(self.env_options.keys()) - p = PlatformFactory.new(self.env_options["platform"]) - board_hwids = p.board_config(self.env_options["board"]).get("build.hwids", []) - port = None - elapsed = 0 - while elapsed < 5 and not port: - for item in util.get_serialports(): - port = item["port"] - for hwid in board_hwids: - hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "") - if hwid_str in item["hwid"] and self.is_serial_port_ready(port): - return port - - if port and not self.is_serial_port_ready(port): - port = None - - if not port: - sleep(0.25) - elapsed += 0.25 - - if not port: - raise exception.PlatformioException( - "Please specify `test_port` for environment or use " - "global `--test-port` option." 
- ) - return port - - @staticmethod - def is_serial_port_ready(port, timeout=3): - if not port: - return False - elapsed = 0 - while elapsed < timeout: - try: - serial.Serial(port, timeout=1).close() - return True - except: # pylint: disable=bare-except - pass - sleep(1) - elapsed += 1 - return False diff --git a/platformio/commands/test/native.py b/platformio/commands/test/native.py deleted file mode 100644 index 3c30e97b..00000000 --- a/platformio/commands/test/native.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) 2014-present PlatformIO -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from os.path import join - -from platformio import proc -from platformio.commands.test.processor import TestProcessorBase -from platformio.proc import LineBufferedAsyncPipe - - -class NativeTestProcessor(TestProcessorBase): - def process(self): - if not self.options["without_building"]: - self.print_progress("Building...") - if not self.build_or_upload(["__test"]): - return False - if self.options["without_testing"]: - return None - self.print_progress("Testing...") - return self.run() - - def run(self): - build_dir = self.options["project_config"].get("platformio", "build_dir") - result = proc.exec_command( - [join(build_dir, self.env_name, "program")], - stdout=LineBufferedAsyncPipe(self.on_run_out), - stderr=LineBufferedAsyncPipe(self.on_run_out), - ) - assert "returncode" in result - return result["returncode"] == 0 and not self._run_failed diff --git a/platformio/commands/test/processor.py b/platformio/commands/test/processor.py deleted file mode 100644 index 0e1e367f..00000000 --- a/platformio/commands/test/processor.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright (c) 2014-present PlatformIO -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
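The removed test/processor.py that follows renders the per-framework transport shim with two `string.Template` passes: the transport hooks first, then the baudrate. A minimal sketch of that two-pass substitution; note that the `#include` targets were stripped from the hunk below by formatting, so `<Arduino.h>` is filled in here as an assumption:

```python
from string import Template

# skeleton of the generated transport file (heavily shortened)
file_tpl = "\n".join(
    [
        "$include",
        "void output_start(unsigned int baudrate) { $begin }",
        "void output_char(int c) { $putchar }",
    ]
)

# hooks for the Arduino transport, mirroring TRANSPORT_OPTIONS["arduino"]
arduino = {
    "include": "#include <Arduino.h>",  # assumed; stripped in the hunk below
    "begin": "Serial.begin($baudrate);",
    "putchar": "Serial.write(c);",
}

rendered = Template(file_tpl).substitute(arduino)          # pass 1: transport hooks
rendered = Template(rendered).substitute(baudrate=115200)  # pass 2: baudrate
print(rendered)
```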
- -import atexit -from os import listdir, remove -from os.path import isdir, isfile, join -from string import Template - -import click - -from platformio import exception - -TRANSPORT_OPTIONS = { - "arduino": { - "include": "#include ", - "object": "", - "putchar": "Serial.write(c);", - "flush": "Serial.flush();", - "begin": "Serial.begin($baudrate);", - "end": "Serial.end();", - "language": "cpp", - }, - "mbed": { - "include": "#include ", - "object": ( - "#if MBED_MAJOR_VERSION == 6\nUnbufferedSerial pc(USBTX, USBRX);\n" - "#else\nRawSerial pc(USBTX, USBRX);\n#endif" - ), - "putchar": ( - "#if MBED_MAJOR_VERSION == 6\npc.write(&c, 1);\n" - "#else\npc.putc(c);\n#endif" - ), - "flush": "", - "begin": "pc.baud($baudrate);", - "end": "", - "language": "cpp", - }, - "espidf": { - "include": "#include ", - "object": "", - "putchar": "putchar(c);", - "flush": "fflush(stdout);", - "begin": "", - "end": "", - }, - "zephyr": { - "include": "#include ", - "object": "", - "putchar": 'printk("%c", c);', - "flush": "", - "begin": "", - "end": "", - }, - "native": { - "include": "#include ", - "object": "", - "putchar": "putchar(c);", - "flush": "fflush(stdout);", - "begin": "", - "end": "", - }, - "custom": { - "include": '#include "unittest_transport.h"', - "object": "", - "putchar": "unittest_uart_putchar(c);", - "flush": "unittest_uart_flush();", - "begin": "unittest_uart_begin();", - "end": "unittest_uart_end();", - "language": "cpp", - }, -} - -CTX_META_TEST_IS_RUNNING = __name__ + ".test_running" -CTX_META_TEST_RUNNING_NAME = __name__ + ".test_running_name" - - -class TestProcessorBase(object): - - DEFAULT_BAUDRATE = 115200 - - def __init__(self, cmd_ctx, testname, envname, options): - self.cmd_ctx = cmd_ctx - self.cmd_ctx.meta[CTX_META_TEST_IS_RUNNING] = True - self.test_name = testname - self.options = options - self.env_name = envname - self.env_options = options["project_config"].items(env=envname, as_dict=True) - self._run_failed = False - self._output_file_generated = False - - def get_transport(self): - transport = None - if self.env_options.get("platform") == "native": - transport = "native" - elif "framework" in self.env_options: - transport = self.env_options.get("framework")[0] - if "test_transport" in self.env_options: - transport = self.env_options["test_transport"] - if transport not in TRANSPORT_OPTIONS: - raise exception.PlatformioException( - "Unknown Unit Test transport `%s`. 
Please check a documentation how " - "to create an own 'Test Transport':\n" - "- https://docs.platformio.org/page/plus/unit-testing.html" % transport - ) - return transport.lower() - - def get_baudrate(self): - return int(self.env_options.get("test_speed", self.DEFAULT_BAUDRATE)) - - def print_progress(self, text): - click.secho(text, bold=self.options.get("verbose")) - - def build_or_upload(self, target): - if not self._output_file_generated: - self.generate_output_file( - self.options["project_config"].get("platformio", "test_dir") - ) - self._output_file_generated = True - - if self.test_name != "*": - self.cmd_ctx.meta[CTX_META_TEST_RUNNING_NAME] = self.test_name - - try: - # pylint: disable=import-outside-toplevel - from platformio.commands.run.command import cli as cmd_run - - return self.cmd_ctx.invoke( - cmd_run, - project_dir=self.options["project_dir"], - project_conf=self.options["project_config"].path, - upload_port=self.options.get("upload_port"), - verbose=self.options["verbose"], - silent=self.options.get("silent"), - environment=[self.env_name], - disable_auto_clean="nobuild" in target, - target=target, - ) - except exception.ReturnErrorCode: - return False - - def process(self): - raise NotImplementedError - - def run(self): - raise NotImplementedError - - def on_run_out(self, line): - line = line.strip() - if line.endswith(":PASS"): - click.echo("%s\t[%s]" % (line[:-5], click.style("PASSED", fg="green"))) - elif ":FAIL" in line: - self._run_failed = True - click.echo("%s\t[%s]" % (line, click.style("FAILED", fg="red"))) - else: - click.echo(line) - - def generate_output_file(self, test_dir): - assert isdir(test_dir) - - file_tpl = "\n".join( - [ - "$include", - "#include ", - "", - "$object", - "", - "#ifdef __GNUC__", - "void output_start(unsigned int baudrate __attribute__((unused)))", - "#else", - "void output_start(unsigned int baudrate)", - "#endif", - "{", - " $begin", - "}", - "", - "void output_char(int c)", - "{", - " $putchar", - "}", - "", - "void output_flush(void)", - "{", - " $flush", - "}", - "", - "void output_complete(void)", - "{", - " $end", - "}", - ] - ) - - tmp_file_prefix = "tmp_pio_test_transport" - - def delete_tmptest_files(test_dir): - for item in listdir(test_dir): - if item.startswith(tmp_file_prefix) and isfile(join(test_dir, item)): - try: - remove(join(test_dir, item)) - except: # pylint: disable=bare-except - click.secho( - "Warning: Could not remove temporary file '%s'. " - "Please remove it manually." 
% join(test_dir, item), - fg="yellow", - ) - - transport_options = TRANSPORT_OPTIONS[self.get_transport()] - tpl = Template(file_tpl).substitute(transport_options) - data = Template(tpl).substitute(baudrate=self.get_baudrate()) - - delete_tmptest_files(test_dir) - tmp_file = join( - test_dir, - "%s.%s" % (tmp_file_prefix, transport_options.get("language", "c")), - ) - with open(tmp_file, mode="w", encoding="utf8") as fp: - fp.write(data) - - atexit.register(delete_tmptest_files, test_dir) diff --git a/platformio/commands/update.py b/platformio/commands/update.py index ff88723e..c0da8055 100644 --- a/platformio/commands/update.py +++ b/platformio/commands/update.py @@ -14,7 +14,6 @@ import click -from platformio.cache import cleanup_content_cache from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY from platformio.commands.lib.command import lib_update as cmd_lib_update from platformio.commands.platform import platform_update as cmd_platform_update @@ -23,7 +22,9 @@ from platformio.package.manager.library import LibraryPackageManager @click.command( - "update", short_help="Update installed platforms, packages and libraries" + "update", + short_help="Update installed platforms, packages and libraries", + hidden=True, ) @click.option("--core-packages", is_flag=True, help="Update only the core packages") @click.option( @@ -37,12 +38,10 @@ from platformio.package.manager.library import LibraryPackageManager ) @click.pass_context def cli(ctx, core_packages, only_check, dry_run): - # cleanup lib search results, cached board and platform lists - cleanup_content_cache("http") - only_check = dry_run or only_check - update_core_packages(only_check) + if not only_check: + update_core_packages() if core_packages: return diff --git a/platformio/commands/upgrade.py b/platformio/commands/upgrade.py index 25e5bd01..0cba9e74 100644 --- a/platformio/commands/upgrade.py +++ b/platformio/commands/upgrade.py @@ -22,13 +22,15 @@ import click from platformio import VERSION, __version__, app, exception from platformio.clients.http import fetch_remote_content from platformio.compat import IS_WINDOWS +from platformio.package.manager.core import update_core_packages from platformio.proc import exec_command, get_pythonexe_path from platformio.project.helpers import get_project_cache_dir -@click.command("upgrade", short_help="Upgrade PlatformIO to the latest version") +@click.command("upgrade", short_help="Upgrade PlatformIO Core to the latest version") @click.option("--dev", is_flag=True, help="Use development branch") def cli(dev): + update_core_packages() if not dev and __version__ == get_latest_version(): return click.secho( "You're up-to-date!\nPlatformIO %s is currently the " diff --git a/platformio/compat.py b/platformio/compat.py index f3f79ea6..43eb98dd 100644 --- a/platformio/compat.py +++ b/platformio/compat.py @@ -14,25 +14,26 @@ # pylint: disable=unused-import,no-name-in-module +import importlib.util import inspect import locale import sys from platformio.exception import UserSideException -if sys.version_info >= (3,): - if sys.version_info >= (3, 7): - from asyncio import create_task as aio_create_task - from asyncio import get_running_loop as aio_get_running_loop - else: - from asyncio import ensure_future as aio_create_task - from asyncio import get_event_loop as aio_get_running_loop +if sys.version_info >= (3, 7): + from asyncio import create_task as aio_create_task + from asyncio import get_running_loop as aio_get_running_loop +else: + from asyncio import ensure_future as 
aio_create_task + from asyncio import get_event_loop as aio_get_running_loop -PY2 = sys.version_info[0] == 2 +PY2 = sys.version_info[0] == 2 # DO NOT REMOVE IT. ESP8266/ESP32 depend on it IS_CYGWIN = sys.platform.startswith("cygwin") IS_WINDOWS = WINDOWS = sys.platform.startswith("win") IS_MACOS = sys.platform.startswith("darwin") +MISSING = object() string_types = (str,) @@ -57,8 +58,6 @@ def hashlib_encode_data(data): def load_python_module(name, pathname): - import importlib.util # pylint: disable=import-outside-toplevel - spec = importlib.util.spec_from_file_location(name, pathname) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) diff --git a/platformio/debug/command.py b/platformio/debug/command.py new file mode 100644 index 00000000..e8fd6290 --- /dev/null +++ b/platformio/debug/command.py @@ -0,0 +1,203 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint: disable=too-many-arguments, too-many-locals +# pylint: disable=too-many-branches, too-many-statements + +import asyncio +import os +import signal +import subprocess + +import click + +from platformio import app, exception, fs, proc +from platformio.compat import IS_WINDOWS +from platformio.debug import helpers +from platformio.debug.config.factory import DebugConfigFactory +from platformio.debug.exception import DebugInvalidOptionsError +from platformio.debug.process.gdb import GDBClientProcess +from platformio.platform.factory import PlatformFactory +from platformio.project.config import ProjectConfig +from platformio.project.exception import ProjectEnvsNotAvailableError +from platformio.project.helpers import is_platformio_project +from platformio.project.options import ProjectOptions + + +@click.command( + "debug", + context_settings=dict(ignore_unknown_options=True), + short_help="Unified Debugger", +) +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path( + exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True + ), +) +@click.option( + "-c", + "--project-conf", + type=click.Path( + exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True + ), +) +@click.option("--environment", "-e", metavar="") +@click.option("--load-mode", type=ProjectOptions["env.debug_load_mode"].type) +@click.option("--verbose", "-v", is_flag=True) +@click.option("--interface", type=click.Choice(["gdb"])) +@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED) +@click.pass_context +def debug_cmd( + ctx, + project_dir, + project_conf, + environment, + load_mode, + verbose, + interface, + __unprocessed, +): + app.set_session_var("custom_project_conf", project_conf) + + # use env variables from Eclipse or CLion + for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"): + if is_platformio_project(project_dir): + break + if os.getenv(name): + project_dir = os.getenv(name) + + with fs.cd(project_dir): + return _debug_in_project_dir( + ctx, + project_dir, + project_conf, + environment, + load_mode, + 
verbose, + interface, + __unprocessed, + ) + + +def _debug_in_project_dir( + ctx, + project_dir, + project_conf, + environment, + load_mode, + verbose, + interface, + __unprocessed, +): + project_config = ProjectConfig.get_instance(project_conf) + project_config.validate(envs=[environment] if environment else None) + env_name = environment or helpers.get_default_debug_env(project_config) + + if not interface: + return helpers.predebug_project( + ctx, project_dir, project_config, env_name, False, verbose + ) + + env_options = project_config.items(env=env_name, as_dict=True) + if "platform" not in env_options: + raise ProjectEnvsNotAvailableError() + + debug_config = DebugConfigFactory.new( + PlatformFactory.new(env_options["platform"], autoinstall=True), + project_config, + env_name, + ) + + if "--version" in __unprocessed: + return subprocess.run( + [debug_config.client_executable_path, "--version"], check=True + ) + + try: + fs.ensure_udev_rules() + except exception.InvalidUdevRules as e: + click.echo( + helpers.escape_gdbmi_stream("~", str(e) + "\n") + if helpers.is_gdbmi_mode() + else str(e) + "\n", + nl=False, + ) + + rebuild_prog = False + preload = debug_config.load_cmds == ["preload"] + load_mode = load_mode or debug_config.load_mode + if load_mode == "always": + rebuild_prog = preload or not helpers.has_debug_symbols( + debug_config.program_path + ) + elif load_mode == "modified": + rebuild_prog = helpers.is_prog_obsolete( + debug_config.program_path + ) or not helpers.has_debug_symbols(debug_config.program_path) + + if not (debug_config.program_path and os.path.isfile(debug_config.program_path)): + rebuild_prog = True + + if preload or (not rebuild_prog and load_mode != "always"): + # don't load firmware through debug server + debug_config.load_cmds = [] + + if rebuild_prog: + if helpers.is_gdbmi_mode(): + click.echo( + helpers.escape_gdbmi_stream( + "~", "Preparing firmware for debugging...\n" + ), + nl=False, + ) + stream = helpers.GDBMIConsoleStream() + with proc.capture_std_streams(stream): + helpers.predebug_project( + ctx, project_dir, project_config, env_name, preload, verbose + ) + stream.close() + else: + click.echo("Preparing firmware for debugging...") + helpers.predebug_project( + ctx, project_dir, project_config, env_name, preload, verbose + ) + + # save SHA sum of newly created prog + if load_mode == "modified": + helpers.is_prog_obsolete(debug_config.program_path) + + if not os.path.isfile(debug_config.program_path): + raise DebugInvalidOptionsError("Program/firmware is missed") + + loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop() + asyncio.set_event_loop(loop) + + client = GDBClientProcess(project_dir, debug_config) + coro = client.run(__unprocessed) + try: + signal.signal(signal.SIGINT, signal.SIG_IGN) + loop.run_until_complete(coro) + if IS_WINDOWS: + client.close() + # an issue with `asyncio` executor and STDIN, + # it cannot be closed gracefully + proc.force_exit() + finally: + client.close() + loop.close() + + return True diff --git a/platformio/debug/config/base.py b/platformio/debug/config/base.py index db7ddfba..c53c7d0f 100644 --- a/platformio/debug/config/base.py +++ b/platformio/debug/config/base.py @@ -20,7 +20,7 @@ from platformio.compat import string_types from platformio.debug.exception import DebugInvalidOptionsError from platformio.debug.helpers import reveal_debug_port from platformio.project.config import ProjectConfig -from platformio.project.helpers import load_project_ide_data +from platformio.project.helpers
import load_build_metadata from platformio.project.options import ProjectOptions @@ -147,7 +147,7 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes ) def _load_build_data(self): - data = load_project_ide_data(os.getcwd(), self.env_name, cache=True) + data = load_build_metadata(os.getcwd(), self.env_name, cache=True) if data: return data raise DebugInvalidOptionsError("Could not load a build configuration") @@ -186,11 +186,7 @@ class DebugConfigBase: # pylint: disable=too-many-instance-attributes else None ) if server_package and not server_package_dir: - self.platform.install_packages( - with_packages=[server_package], - skip_default_package=True, - silent=True, - ) + self.platform.install_package(server_package) server_package_dir = self.platform.get_package_dir(server_package) result.update( dict( diff --git a/platformio/debug/config/factory.py b/platformio/debug/config/factory.py index d74dad38..4741b800 100644 --- a/platformio/debug/config/factory.py +++ b/platformio/debug/config/factory.py @@ -23,7 +23,7 @@ class DebugConfigFactory(object): @staticmethod def get_clsname(name): name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I) - return "%s%sDebugConfig" % (name.upper()[0], name.lower()[1:]) + return "%sDebugConfig" % name.lower().capitalize() @classmethod def new(cls, platform, project_config, env_name): diff --git a/platformio/debug/config/generic.py b/platformio/debug/config/generic.py index a8c6c410..870aad7b 100644 --- a/platformio/debug/config/generic.py +++ b/platformio/debug/config/generic.py @@ -34,5 +34,5 @@ $INIT_BREAK """ def __init__(self, *args, **kwargs): - super(GenericDebugConfig, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.port = ":3333" diff --git a/platformio/debug/config/jlink.py b/platformio/debug/config/jlink.py index 020decd7..ed5f9966 100644 --- a/platformio/debug/config/jlink.py +++ b/platformio/debug/config/jlink.py @@ -38,11 +38,9 @@ $INIT_BREAK """ def __init__(self, *args, **kwargs): - super(JlinkDebugConfig, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.port = ":2331" @property def server_ready_pattern(self): - return super(JlinkDebugConfig, self).server_ready_pattern or ( - "Waiting for GDB connection" - ) + return super().server_ready_pattern or ("Waiting for GDB connection") diff --git a/platformio/debug/config/mspdebug.py b/platformio/debug/config/mspdebug.py index e71b09ca..86ee8d6a 100644 --- a/platformio/debug/config/mspdebug.py +++ b/platformio/debug/config/mspdebug.py @@ -32,5 +32,5 @@ $INIT_BREAK """ def __init__(self, *args, **kwargs): - super(MspdebugDebugConfig, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.port = ":2000" diff --git a/platformio/debug/config/qemu.py b/platformio/debug/config/qemu.py index d32af5a2..e272a373 100644 --- a/platformio/debug/config/qemu.py +++ b/platformio/debug/config/qemu.py @@ -33,5 +33,5 @@ $INIT_BREAK """ def __init__(self, *args, **kwargs): - super(QemuDebugConfig, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.port = ":1234" diff --git a/platformio/debug/config/renode.py b/platformio/debug/config/renode.py index 3aef5ef8..0a4164de 100644 --- a/platformio/debug/config/renode.py +++ b/platformio/debug/config/renode.py @@ -35,11 +35,11 @@ monitor start """ def __init__(self, *args, **kwargs): - super(RenodeDebugConfig, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.port = ":3333" @property def server_ready_pattern(self): - return 
super(RenodeDebugConfig, self).server_ready_pattern or ( + return super().server_ready_pattern or ( "GDB server with all CPUs started on port" ) diff --git a/platformio/debug/helpers.py b/platformio/debug/helpers.py index 5bac5d61..cd87f141 100644 --- a/platformio/debug/helpers.py +++ b/platformio/debug/helpers.py @@ -12,22 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re import sys import time from fnmatch import fnmatch from hashlib import sha1 from io import BytesIO -from os.path import isfile -from platformio import util from platformio.commands import PlatformioCLI from platformio.commands.run.command import cli as cmd_run from platformio.commands.run.command import print_processing_header -from platformio.commands.test.helpers import get_test_names -from platformio.commands.test.processor import TestProcessorBase from platformio.compat import IS_WINDOWS, is_bytes from platformio.debug.exception import DebugInvalidOptionsError +from platformio.device.list import list_serial_ports +from platformio.test.helpers import list_test_names +from platformio.test.result import TestSuite +from platformio.test.runners.base import TestRunnerOptions +from platformio.test.runners.factory import TestRunnerFactory class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods @@ -80,27 +82,25 @@ def predebug_project( ): # pylint: disable=too-many-arguments debug_testname = project_config.get("env:" + env_name, "debug_test") if debug_testname: - test_names = get_test_names(project_config) + test_names = list_test_names(project_config) if debug_testname not in test_names: raise DebugInvalidOptionsError( "Unknown test name `%s`. Valid names are `%s`" % (debug_testname, ", ".join(test_names)) ) print_processing_header(env_name, project_config, verbose) - tp = TestProcessorBase( - ctx, - debug_testname, - env_name, - dict( - project_config=project_config, - project_dir=project_dir, + test_runner = TestRunnerFactory.new( + TestSuite(env_name, debug_testname), + project_config, + TestRunnerOptions( + verbose=verbose, without_building=False, - without_uploading=True, + without_debugging=False, + without_uploading=not preload, without_testing=True, - verbose=False, ), ) - tp.build_or_upload(["__debug", "__test"] + (["upload"] if preload else [])) + test_runner.start(ctx) else: ctx.invoke( cmd_run, @@ -116,7 +116,7 @@ def predebug_project( def has_debug_symbols(prog_path): - if not isfile(prog_path): + if not os.path.isfile(prog_path): return False matched = { b".debug_info": False, @@ -142,7 +142,7 @@ def has_debug_symbols(prog_path): def is_prog_obsolete(prog_path): prog_hash_path = prog_path + ".sha1" - if not isfile(prog_path): + if not os.path.isfile(prog_path): return True shasum = sha1() with open(prog_path, "rb") as fp: @@ -153,7 +153,7 @@ def is_prog_obsolete(prog_path): shasum.update(data) new_digest = shasum.hexdigest() old_digest = None - if isfile(prog_hash_path): + if os.path.isfile(prog_hash_path): with open(prog_hash_path, encoding="utf8") as fp: old_digest = fp.read() if new_digest == old_digest: @@ -178,7 +178,7 @@ def reveal_debug_port(env_debug_port, tool_name, tool_settings): return fnmatch(port, pattern) def _look_for_serial_port(hwids): - for item in util.get_serialports(filter_hwid=True): + for item in list_serial_ports(filter_hwid=True): if not _is_match_pattern(item["port"]): continue port = item["port"] diff --git a/platformio/debug/process/client.py b/platformio/debug/process/client.py 
index 6a6f1b9e..d508bdae 100644 --- a/platformio/debug/process/client.py +++ b/platformio/debug/process/client.py @@ -27,7 +27,7 @@ from platformio.project.helpers import get_project_cache_dir class DebugClientProcess(DebugBaseProcess): def __init__(self, project_dir, debug_config): - super(DebugClientProcess, self).__init__() + super().__init__() self.project_dir = project_dir self.debug_config = debug_config @@ -55,7 +55,7 @@ class DebugClientProcess(DebugBaseProcess): self.debug_config.port = await self._server_process.run() def connection_made(self, transport): - super(DebugClientProcess, self).connection_made(transport) + super().connection_made(transport) self._lock_session(transport.get_pid()) # Disable SIGINT and allow GDB's Ctrl+C interrupt signal.signal(signal.SIGINT, lambda *args, **kwargs: None) @@ -64,7 +64,15 @@ class DebugClientProcess(DebugBaseProcess): def process_exited(self): if self._server_process: self._server_process.terminate() - super(DebugClientProcess, self).process_exited() + super().process_exited() + + def close(self): + self._unlock_session() + if self.working_dir and os.path.isdir(self.working_dir): + fs.rmtree(self.working_dir) + + def __del__(self): + self.close() def _kill_previous_session(self): assert self._session_id @@ -94,8 +102,3 @@ class DebugClientProcess(DebugBaseProcess): return with ContentCache() as cc: cc.delete(self._session_id) - - def __del__(self): - self._unlock_session() - if self.working_dir and os.path.isdir(self.working_dir): - fs.rmtree(self.working_dir) diff --git a/platformio/debug/process/gdb.py b/platformio/debug/process/gdb.py index 1ff395f5..4ce9aebe 100644 --- a/platformio/debug/process/gdb.py +++ b/platformio/debug/process/gdb.py @@ -29,12 +29,12 @@ class GDBClientProcess(DebugClientProcess): INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed" def __init__(self, *args, **kwargs): - super(GDBClientProcess, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._target_is_running = False self._errors_buffer = b"" async def run(self, extra_args): # pylint: disable=arguments-differ - await super(GDBClientProcess, self).run() + await super().run() self.generate_init_script(os.path.join(self.working_dir, self.PIO_SRC_NAME)) gdb_path = self.debug_config.client_executable_path or "gdb" @@ -109,7 +109,7 @@ class GDBClientProcess(DebugClientProcess): fp.write("\n".join(self.debug_config.reveal_patterns(commands))) def stdin_data_received(self, data): - super(GDBClientProcess, self).stdin_data_received(data) + super().stdin_data_received(data) if b"-exec-run" in data: if self._target_is_running: token, _ = data.split(b"-", 1) @@ -127,7 +127,7 @@ class GDBClientProcess(DebugClientProcess): self.transport.get_pipe_transport(0).write(data) def stdout_data_received(self, data): - super(GDBClientProcess, self).stdout_data_received(data) + super().stdout_data_received(data) self._handle_error(data) # go to init break automatically if self.INIT_COMPLETED_BANNER.encode() in data: @@ -170,7 +170,7 @@ class GDBClientProcess(DebugClientProcess): self._target_is_running = True def stderr_data_received(self, data): - super(GDBClientProcess, self).stderr_data_received(data) + super().stderr_data_received(data) self._handle_error(data) def _handle_error(self, data): diff --git a/platformio/debug/process/server.py b/platformio/debug/process/server.py index b2653511..89b4095f 100644 --- a/platformio/debug/process/server.py +++ b/platformio/debug/process/server.py @@ -30,7 +30,7 @@ class DebugServerProcess(DebugBaseProcess): 
STD_BUFFER_SIZE = 1024 def __init__(self, debug_config): - super(DebugServerProcess, self).__init__() + super().__init__() self.debug_config = debug_config self._ready = False self._std_buffer = {"out": b"", "err": b""} @@ -134,7 +134,7 @@ class DebugServerProcess(DebugBaseProcess): return self._ready def stdout_data_received(self, data): - super(DebugServerProcess, self).stdout_data_received( + super().stdout_data_received( escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data ) self._std_buffer["out"] += data @@ -142,7 +142,7 @@ class DebugServerProcess(DebugBaseProcess): self._std_buffer["out"] = self._std_buffer["out"][-1 * self.STD_BUFFER_SIZE :] def stderr_data_received(self, data): - super(DebugServerProcess, self).stderr_data_received(data) + super().stderr_data_received(data) self._std_buffer["err"] += data self._check_ready_by_pattern(self._std_buffer["err"]) self._std_buffer["err"] = self._std_buffer["err"][-1 * self.STD_BUFFER_SIZE :] diff --git a/platformio/commands/device/filters/__init__.py b/platformio/device/__init__.py similarity index 100% rename from platformio/commands/device/filters/__init__.py rename to platformio/device/__init__.py diff --git a/platformio/commands/test/__init__.py b/platformio/device/commands/__init__.py similarity index 100% rename from platformio/commands/test/__init__.py rename to platformio/device/commands/__init__.py diff --git a/platformio/device/commands/list.py b/platformio/device/commands/list.py new file mode 100644 index 00000000..9cd3364f --- /dev/null +++ b/platformio/device/commands/list.py @@ -0,0 +1,99 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json + +import click + +from platformio.device.list import ( + list_logical_devices, + list_mdns_services, + list_serial_ports, +) + + +@click.command("list", short_help="List devices") +@click.option("--serial", is_flag=True, help="List serial ports, default") +@click.option("--logical", is_flag=True, help="List logical devices") +@click.option("--mdns", is_flag=True, help="List multicast DNS services") +@click.option("--json-output", is_flag=True) +def device_list_cmd( # pylint: disable=too-many-branches + serial, logical, mdns, json_output +): + if not logical and not mdns: + serial = True + data = {} + if serial: + data["serial"] = list_serial_ports() + if logical: + data["logical"] = list_logical_devices() + if mdns: + data["mdns"] = list_mdns_services() + + single_key = list(data)[0] if len(list(data)) == 1 else None + + if json_output: + return click.echo(json.dumps(data[single_key] if single_key else data)) + + titles = { + "serial": "Serial Ports", + "logical": "Logical Devices", + "mdns": "Multicast DNS Services", + } + + for key, value in data.items(): + if not single_key: + click.secho(titles[key], bold=True) + click.echo("=" * len(titles[key])) + + if key == "serial": + for item in value: + click.secho(item["port"], fg="cyan") + click.echo("-" * len(item["port"])) + click.echo("Hardware ID: %s" % item["hwid"]) + click.echo("Description: %s" % item["description"]) + click.echo("") + + if key == "logical": + for item in value: + click.secho(item["path"], fg="cyan") + click.echo("-" * len(item["path"])) + click.echo("Name: %s" % item["name"]) + click.echo("") + + if key == "mdns": + for item in value: + click.secho(item["name"], fg="cyan") + click.echo("-" * len(item["name"])) + click.echo("Type: %s" % item["type"]) + click.echo("IP: %s" % item["ip"]) + click.echo("Port: %s" % item["port"]) + if item["properties"]: + click.echo( + "Properties: %s" + % ( + "; ".join( + [ + "%s=%s" % (k, v) + for k, v in item["properties"].items() + ] + ) + ) + ) + click.echo("") + + if single_key: + click.echo("") + + return True diff --git a/platformio/device/commands/monitor.py b/platformio/device/commands/monitor.py new file mode 100644 index 00000000..f3d4ace5 --- /dev/null +++ b/platformio/device/commands/monitor.py @@ -0,0 +1,184 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import sys + +import click +from serial.tools import miniterm + +from platformio import exception, fs +from platformio.device.filters.base import register_filters +from platformio.device.finder import find_serial_port +from platformio.platform.factory import PlatformFactory +from platformio.project.config import ProjectConfig +from platformio.project.exception import NotPlatformIOProjectError +from platformio.project.options import ProjectOptions + + +@click.command("monitor", short_help="Monitor device (Serial/Socket)") +@click.option("--port", "-p", help="Port, a number or a device name") +@click.option( + "--baud", + "-b", + type=int, + help="Set baud rate, default=%d" % ProjectOptions["env.monitor_speed"].default, +) +@click.option( + "--parity", + default="N", + type=click.Choice(["N", "E", "O", "S", "M"]), + help="Set parity, default=N", +) +@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off") +@click.option( + "--xonxoff", is_flag=True, help="Enable software flow control, default=Off" +) +@click.option( + "--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state" +) +@click.option( + "--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state" +) +@click.option("--echo", is_flag=True, help="Enable local echo, default=Off") +@click.option( + "--encoding", + default="UTF-8", + help="Set the encoding for the serial port (e.g. hexlify, " + "Latin1, UTF-8), default: UTF-8", +) +@click.option("--filter", "-f", multiple=True, help="Add filters/text transformations") +@click.option( + "--eol", + default="CRLF", + type=click.Choice(["CR", "LF", "CRLF"]), + help="End of line mode, default=CRLF", +) +@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations") +@click.option( + "--exit-char", + type=int, + default=3, + help="ASCII code of special character that is used to exit " + "the application, default=3 (Ctrl+C)", +) +@click.option( + "--menu-char", + type=int, + default=20, + help="ASCII code of special character that is used to " + "control miniterm (menu), default=20 (DEC)", +) +@click.option( + "--quiet", + is_flag=True, + help="Diagnostics: suppress non-error messages, default=Off", +) +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option( + "-e", + "--environment", + help="Load configuration from `platformio.ini` and specified environment", +) +def device_monitor_cmd(**kwargs): # pylint: disable=too-many-branches + project_options = {} + platform = None + with fs.cd(kwargs["project_dir"]): + try: + project_options = get_project_options(kwargs["environment"]) + kwargs = apply_project_monitor_options(kwargs, project_options) + if "platform" in project_options: + platform = PlatformFactory.new(project_options["platform"]) + except NotPlatformIOProjectError: + pass + register_filters(platform=platform, options=kwargs) + kwargs["port"] = find_serial_port( + initial_port=kwargs["port"], + board_config=platform.board_config(project_options.get("board")) + if platform and project_options.get("board") + else None, + upload_protocol=project_options.get("upload_port"), + ) + + # override system argv with patched options + sys.argv = ["monitor"] + project_options_to_monitor_argv( + kwargs, + project_options, + ignore=("port", "baud", "rts", "dtr", "environment", "project_dir"), + ) + + if not kwargs["quiet"]: + click.echo( + "--- Available filters and text 
transformations: %s" + % ", ".join(sorted(miniterm.TRANSFORMATIONS.keys())) + ) + click.echo("--- More details at https://bit.ly/pio-monitor-filters") + try: + miniterm.main( + default_port=kwargs["port"], + default_baudrate=kwargs["baud"] + or ProjectOptions["env.monitor_speed"].default, + default_rts=kwargs["rts"], + default_dtr=kwargs["dtr"], + ) + except Exception as e: + raise exception.MinitermException(e) + + +def get_project_options(environment=None): + config = ProjectConfig.get_instance() + config.validate(envs=[environment] if environment else None) + environment = environment or config.get_default_env() + return config.items(env=environment, as_dict=True) + + +def apply_project_monitor_options(cli_options, project_options): + for k in ("port", "speed", "rts", "dtr"): + k2 = "monitor_%s" % k + if k == "speed": + k = "baud" + if cli_options[k] is None and k2 in project_options: + cli_options[k] = project_options[k2] + if k != "port": + cli_options[k] = int(cli_options[k]) + return cli_options + + +def project_options_to_monitor_argv(cli_options, project_options, ignore=None): + confmon_flags = project_options.get("monitor_flags", []) + result = confmon_flags[::] + + for f in project_options.get("monitor_filters", []): + result.extend(["--filter", f]) + + for k, v in cli_options.items(): + if v is None or (ignore and k in ignore): + continue + k = "--" + k.replace("_", "-") + if k in confmon_flags: + continue + if isinstance(v, bool): + if v: + result.append(k) + elif isinstance(v, tuple): + for i in v: + result.extend([k, i]) + else: + result.extend([k, str(v)]) + return result diff --git a/platformio/device/filters/__init__.py b/platformio/device/filters/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/platformio/device/filters/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/platformio/commands/device/helpers.py b/platformio/device/filters/base.py similarity index 56% rename from platformio/commands/device/helpers.py rename to platformio/device/filters/base.py index 558d6a2e..1c02e8b5 100644 --- a/platformio/commands/device/helpers.py +++ b/platformio/device/filters/base.py @@ -18,91 +18,35 @@ import os from serial.tools import miniterm from platformio import fs -from platformio.commands.device import DeviceMonitorFilter from platformio.compat import get_object_members, load_python_module from platformio.package.manager.tool import ToolPackageManager from platformio.project.config import ProjectConfig -def apply_project_monitor_options(cli_options, project_options): - for k in ("port", "speed", "rts", "dtr"): - k2 = "monitor_%s" % k - if k == "speed": - k = "baud" - if cli_options[k] is None and k2 in project_options: - cli_options[k] = project_options[k2] - if k != "port": - cli_options[k] = int(cli_options[k]) - return cli_options +class DeviceMonitorFilterBase(miniterm.Transform): + def __init__(self, options=None): + """Called by PlatformIO to pass context""" + miniterm.Transform.__init__(self) + self.options = options or {} + self.project_dir = self.options.get("project_dir") + self.environment = self.options.get("environment") -def options_to_argv(cli_options, project_options, ignore=None): - confmon_flags = project_options.get("monitor_flags", []) - result = confmon_flags[::] + self.config = ProjectConfig.get_instance() + if not self.environment: + default_envs = self.config.default_envs() + if default_envs: + self.environment = default_envs[0] + elif self.config.envs(): + self.environment = self.config.envs()[0] - for f in project_options.get("monitor_filters", []): - result.extend(["--filter", f]) + def __call__(self): + """Called by the miniterm library when the filter is actually used""" + return self - for k, v in cli_options.items(): - if v is None or (ignore and k in ignore): - continue - k = "--" + k.replace("_", "-") - if k in confmon_flags: - continue - if isinstance(v, bool): - if v: - result.append(k) - elif isinstance(v, tuple): - for i in v: - result.extend([k, i]) - else: - result.extend([k, str(v)]) - return result - - -def get_project_options(environment=None): - config = ProjectConfig.get_instance() - config.validate(envs=[environment] if environment else None) - if not environment: - default_envs = config.default_envs() - if default_envs: - environment = default_envs[0] - else: - environment = config.envs()[0] - return config.items(env=environment, as_dict=True) - - -def get_board_hwids(project_dir, platform, board): - with fs.cd(project_dir): - return platform.board_config(board).get("build.hwids", []) - - -def load_monitor_filter(path, options=None): - name = os.path.basename(path) - name = name[: name.find(".")] - module = load_python_module("platformio.commands.device.filters.%s" % name, path) - for cls in get_object_members(module).values(): - if ( - not inspect.isclass(cls) - or not issubclass(cls, DeviceMonitorFilter) - or cls == DeviceMonitorFilter - ): - continue - obj = cls(options) - miniterm.TRANSFORMATIONS[obj.NAME] = obj - return True - - -def load_monitor_filters(monitor_dir, prefix=None, options=None): - if not os.path.isdir(monitor_dir): - return - for name in os.listdir(monitor_dir): - if (prefix and not name.startswith(prefix)) or not name.endswith(".py"): - continue - path = os.path.join(monitor_dir, name) - if not os.path.isfile(path): - continue - load_monitor_filter(path, options) + @property + def 
NAME(self): + raise NotImplementedError("Please declare NAME attribute for the filter class") def register_filters(platform=None, options=None): @@ -130,3 +74,31 @@ def register_filters(platform=None, options=None): os.path.join(fs.get_source_dir(), "commands", "device", "filters"), options=options, ) + + +def load_monitor_filters(monitor_dir, prefix=None, options=None): + if not os.path.isdir(monitor_dir): + return + for name in os.listdir(monitor_dir): + if (prefix and not name.startswith(prefix)) or not name.endswith(".py"): + continue + path = os.path.join(monitor_dir, name) + if not os.path.isfile(path): + continue + load_monitor_filter(path, options) + + +def load_monitor_filter(path, options=None): + name = os.path.basename(path) + name = name[: name.find(".")] + module = load_python_module("platformio.device.filters.%s" % name, path) + for cls in get_object_members(module).values(): + if ( + not inspect.isclass(cls) + or not issubclass(cls, DeviceMonitorFilterBase) + or cls == DeviceMonitorFilterBase + ): + continue + obj = cls(options) + miniterm.TRANSFORMATIONS[obj.NAME] = obj + return True diff --git a/platformio/commands/device/filters/hexlify.py b/platformio/device/filters/hexlify.py similarity index 88% rename from platformio/commands/device/filters/hexlify.py rename to platformio/device/filters/hexlify.py index 1023b573..045f637e 100644 --- a/platformio/commands/device/filters/hexlify.py +++ b/platformio/device/filters/hexlify.py @@ -14,14 +14,14 @@ import serial -from platformio.commands.device import DeviceMonitorFilter +from platformio.device.filters.base import DeviceMonitorFilterBase -class Hexlify(DeviceMonitorFilter): +class Hexlify(DeviceMonitorFilterBase): NAME = "hexlify" def __init__(self, *args, **kwargs): - super(Hexlify, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._counter = 0 def rx(self, text): diff --git a/platformio/commands/device/filters/log2file.py b/platformio/device/filters/log2file.py similarity index 89% rename from platformio/commands/device/filters/log2file.py rename to platformio/device/filters/log2file.py index d7199a19..e4c622d1 100644 --- a/platformio/commands/device/filters/log2file.py +++ b/platformio/device/filters/log2file.py @@ -16,14 +16,14 @@ import io import os.path from datetime import datetime -from platformio.commands.device import DeviceMonitorFilter +from platformio.device.filters.base import DeviceMonitorFilterBase -class LogToFile(DeviceMonitorFilter): +class LogToFile(DeviceMonitorFilterBase): NAME = "log2file" def __init__(self, *args, **kwargs): - super(LogToFile, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._log_fp = None def __call__(self): diff --git a/platformio/commands/device/filters/send_on_enter.py b/platformio/device/filters/send_on_enter.py similarity index 87% rename from platformio/commands/device/filters/send_on_enter.py rename to platformio/device/filters/send_on_enter.py index 50b730cc..ec002295 100644 --- a/platformio/commands/device/filters/send_on_enter.py +++ b/platformio/device/filters/send_on_enter.py @@ -12,14 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from platformio.commands.device import DeviceMonitorFilter +from platformio.device.filters.base import DeviceMonitorFilterBase -class SendOnEnter(DeviceMonitorFilter): +class SendOnEnter(DeviceMonitorFilterBase): NAME = "send_on_enter" def __init__(self, *args, **kwargs): - super(SendOnEnter, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._buffer = "" if self.options.get("eol") == "CR": diff --git a/platformio/commands/device/filters/time.py b/platformio/device/filters/time.py similarity index 88% rename from platformio/commands/device/filters/time.py rename to platformio/device/filters/time.py index 0c2d8884..d7ba1c7f 100644 --- a/platformio/commands/device/filters/time.py +++ b/platformio/device/filters/time.py @@ -14,14 +14,14 @@ from datetime import datetime -from platformio.commands.device import DeviceMonitorFilter +from platformio.device.filters.base import DeviceMonitorFilterBase -class Timestamp(DeviceMonitorFilter): +class Timestamp(DeviceMonitorFilterBase): NAME = "time" def __init__(self, *args, **kwargs): - super(Timestamp, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._line_started = False def rx(self, text): diff --git a/platformio/device/finder.py b/platformio/device/finder.py new file mode 100644 index 00000000..0fe98baa --- /dev/null +++ b/platformio/device/finder.py @@ -0,0 +1,111 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +from fnmatch import fnmatch + +import serial + +from platformio.compat import IS_WINDOWS +from platformio.device.list import list_logical_devices, list_serial_ports + + +def is_pattern_port(port): + if not port: + return False + return set(["*", "?", "[", "]"]) & set(port) + + +def match_serial_port(pattern): + for item in list_serial_ports(): + if fnmatch(item["port"], pattern): + return item["port"] + return None + + +def is_serial_port_ready(port, timeout=1): + try: + serial.Serial(port, timeout=timeout).close() + return True + except: # pylint: disable=bare-except + pass + return False + + +def find_serial_port( + initial_port, board_config=None, upload_protocol=None, ensure_ready=False +): + if initial_port: + if not is_pattern_port(initial_port): + return initial_port + return match_serial_port(initial_port) + port = None + if upload_protocol and upload_protocol.startswith("blackmagic"): + port = find_blackmagic_serial_port() + if not port and board_config: + port = find_board_serial_port(board_config) + if port: + return port + + # pick the last PID:VID USB device + usb_port = None + for item in list_serial_ports(): + if ensure_ready and not is_serial_port_ready(item["port"]): + continue + port = item["port"] + if "VID:PID" in item["hwid"]: + usb_port = port + return usb_port or port + + +def find_blackmagic_serial_port(): + for item in list_serial_ports(): + port = item["port"] + if IS_WINDOWS and port.startswith("COM") and len(port) > 4: + port = "\\\\.\\%s" % port + if "GDB" in item["description"]: + return port + return None + + +def find_board_serial_port(board_config): + board_hwids = board_config.get("build.hwids", []) + if not board_hwids: + return None + for item in list_serial_ports(filter_hwid=True): + port = item["port"] + for hwid in board_hwids: + hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "") + if hwid_str in item["hwid"]: + return port + return None + + +def find_mbed_disk(initial_port): + msdlabels = ("mbed", "nucleo", "frdm", "microbit") + for item in list_logical_devices(): + if item["path"].startswith("/net"): + continue + if ( + initial_port + and is_pattern_port(initial_port) + and not fnmatch(item["path"], initial_port) + ): + continue + mbed_pages = [os.path.join(item["path"], n) for n in ("mbed.htm", "mbed.html")] + if any(os.path.isfile(p) for p in mbed_pages): + return item["path"] + if item["name"] and any(l in item["name"].lower() for l in msdlabels): + return item["path"] + return None diff --git a/platformio/device/list.py b/platformio/device/list.py new file mode 100644 index 00000000..3695f760 --- /dev/null +++ b/platformio/device/list.py @@ -0,0 +1,154 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import os +import re +import time +from glob import glob + +import zeroconf + +from platformio import __version__, exception, proc +from platformio.compat import IS_MACOS, IS_WINDOWS + + +def list_serial_ports(filter_hwid=False): + try: + # pylint: disable=import-outside-toplevel + from serial.tools.list_ports import comports + except ImportError: + raise exception.GetSerialPortsError(os.name) + + result = [] + for p, d, h in comports(): + if not p: + continue + if not filter_hwid or "VID:PID" in h: + result.append({"port": p, "description": d, "hwid": h}) + + if filter_hwid: + return result + + # fix for PySerial + if not result and IS_MACOS: + for p in glob("/dev/tty.*"): + result.append({"port": p, "description": "n/a", "hwid": "n/a"}) + return result + + +def list_logical_devices(): + items = [] + if IS_WINDOWS: + try: + result = proc.exec_command( + ["wmic", "logicaldisk", "get", "name,VolumeName"] + ).get("out", "") + devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?") + for line in result.split("\n"): + match = devicenamere.match(line.strip()) + if not match: + continue + items.append({"path": match.group(1) + "\\", "name": match.group(2)}) + return items + except WindowsError: # pylint: disable=undefined-variable + pass + # try "fsutil" + result = proc.exec_command(["fsutil", "fsinfo", "drives"]).get("out", "") + for device in re.findall(r"[A-Z]:\\", result): + items.append({"path": device, "name": None}) + return items + + result = proc.exec_command(["df"]).get("out") + devicenamere = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I) + for line in result.split("\n"): + match = devicenamere.match(line.strip()) + if not match: + continue + items.append({"path": match.group(1), "name": os.path.basename(match.group(1))}) + return items + + +def list_mdns_services(): + class mDNSListener(object): + def __init__(self): + self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All) + self._found_types = [] + self._found_services = [] + + def __enter__(self): + zeroconf.ServiceBrowser( + self._zc, + [ + "_http._tcp.local.", + "_hap._tcp.local.", + "_services._dns-sd._udp.local.", + ], + self, + ) + return self + + def __exit__(self, etype, value, traceback): + self._zc.close() + + def add_service(self, zc, type_, name): + try: + assert zeroconf.service_type_name(name) + assert str(name) + except (AssertionError, UnicodeError, zeroconf.BadTypeInNameException): + return + if name not in self._found_types: + self._found_types.append(name) + zeroconf.ServiceBrowser(self._zc, name, self) + if type_ in self._found_types: + s = zc.get_service_info(type_, name) + if s: + self._found_services.append(s) + + def remove_service(self, zc, type_, name): + pass + + def update_service(self, zc, type_, name): + pass + + def get_services(self): + return self._found_services + + items = [] + with mDNSListener() as mdns: + time.sleep(3) + for service in mdns.get_services(): + properties = None + if service.properties: + try: + properties = { + k.decode("utf8"): v.decode("utf8") + if isinstance(v, bytes) + else v + for k, v in service.properties.items() + } + json.dumps(properties) + except UnicodeDecodeError: + properties = None + + items.append( + { + "type": service.type, + "name": service.name, + "ip": ", ".join(service.parsed_addresses()), + "port": service.port, + "properties": properties, + } + ) + return items diff --git a/platformio/exception.py b/platformio/exception.py index ef1d3bab..03382a55 100644 --- a/platformio/exception.py +++ b/platformio/exception.py @@ -22,7 
+22,7 @@ class PlatformioException(Exception): # pylint: disable=not-an-iterable return self.MESSAGE.format(*self.args) - return super(PlatformioException, self).__str__() + return super().__str__() class ReturnErrorCode(PlatformioException): @@ -48,7 +48,7 @@ class AbortedByUser(UserSideException): # -class InvalidUdevRules(PlatformioException): +class InvalidUdevRules(UserSideException): pass @@ -135,14 +135,3 @@ class CygwinEnvDetected(PlatformioException): "PlatformIO does not work within Cygwin environment. " "Use native Terminal instead." ) - - -class TestDirNotExists(UserSideException): - - MESSAGE = ( - "A test folder '{0}' does not exist.\nPlease create 'test' " - "directory in project's root and put a test set.\n" - "More details about Unit " - "Testing: https://docs.platformio.org/page/plus/" - "unit-testing.html" - ) diff --git a/platformio/fs.py b/platformio/fs.py index 6acfc726..2ede27b7 100644 --- a/platformio/fs.py +++ b/platformio/fs.py @@ -146,33 +146,40 @@ def path_endswith_ext(path, extensions): def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True): - def _append_build_item(items, item, src_dir): + def _add_candidate(items, item, src_dir): if not src_exts or path_endswith_ext(item, src_exts): items.add(os.path.relpath(item, src_dir)) + def _find_candidates(pattern): + candidates = set() + for item in glob.glob( + os.path.join(glob.escape(src_dir), pattern), recursive=True + ): + if not os.path.isdir(item): + _add_candidate(candidates, item, src_dir) + continue + for root, dirs, files in os.walk(item, followlinks=followlinks): + for d in dirs if not followlinks else []: + if os.path.islink(os.path.join(root, d)): + _add_candidate(candidates, os.path.join(root, d), src_dir) + for f in files: + _add_candidate(candidates, os.path.join(root, f), src_dir) + return candidates + src_filter = src_filter or "" if isinstance(src_filter, (list, tuple)): src_filter = " ".join(src_filter) - matches = set() + result = set() # correct fs directory separator src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep) for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter): - items = set() - for item in glob.glob( - os.path.join(glob.escape(src_dir), pattern), recursive=True - ): - if os.path.isdir(item): - for root, _, files in os.walk(item, followlinks=followlinks): - for f in files: - _append_build_item(items, os.path.join(root, f), src_dir) - else: - _append_build_item(items, item, src_dir) + candidates = _find_candidates(pattern) if action == "+": - matches |= items + result |= candidates else: - matches -= items - return sorted(list(matches)) + result -= candidates + return sorted(list(result)) def to_unix_path(path): diff --git a/platformio/maintenance.py b/platformio/maintenance.py index 82aa4a0f..dccd8086 100644 --- a/platformio/maintenance.py +++ b/platformio/maintenance.py @@ -23,18 +23,13 @@ from platformio import __version__, app, exception, fs, telemetry from platformio.cache import cleanup_content_cache from platformio.clients import http from platformio.commands import PlatformioCLI -from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY -from platformio.commands.lib.command import lib_update as cmd_lib_update from platformio.commands.platform import platform_update as cmd_platform_update from platformio.commands.system.prune import calculate_unnecessary_system_data from platformio.commands.upgrade import get_latest_version from platformio.package.manager.core import update_core_packages -from 
platformio.package.manager.library import LibraryPackageManager -from platformio.package.manager.platform import PlatformPackageManager from platformio.package.manager.tool import ToolPackageManager from platformio.package.meta import PackageSpec from platformio.package.version import pepver_to_semver -from platformio.platform.factory import PlatformFactory def on_platformio_start(ctx, force, caller): @@ -54,8 +49,6 @@ def on_platformio_end(ctx, result): # pylint: disable=unused-argument try: check_platformio_upgrade() - check_internal_updates(ctx, "platforms") - check_internal_updates(ctx, "libraries") check_prune_system() except ( http.HTTPClientError, @@ -157,13 +150,10 @@ def after_upgrade(ctx): return else: click.secho("Please wait while upgrading PlatformIO...", fg="yellow") - try: - cleanup_content_cache("http") - except: # pylint: disable=bare-except - pass # Update PlatformIO's Core packages - update_core_packages(silent=True) + cleanup_content_cache("http") + update_core_packages() u = Upgrader(last_version, __version__) if u.run(ctx): @@ -212,18 +202,21 @@ def after_upgrade(ctx): def check_platformio_upgrade(): - last_check = app.get_state_item("last_check", {}) interval = int(app.get_setting("check_platformio_interval")) * 3600 * 24 - if (time() - interval) < last_check.get("platformio_upgrade", 0): + check_state = app.get_state_item("last_check", {}) + last_checked_time = check_state.get("platformio_upgrade", 0) + if (time() - interval) < last_checked_time: return - last_check["platformio_upgrade"] = int(time()) - app.set_state_item("last_check", last_check) + check_state["platformio_upgrade"] = int(time()) + app.set_state_item("last_check", check_state) + if not last_checked_time: + return http.ensure_internet_on(raise_exception=True) - # Update PlatformIO's Core packages - update_core_packages(silent=True) + # Update PlatformIO Core packages + update_core_packages() latest_version = get_latest_version() if pepver_to_semver(latest_version) <= pepver_to_semver(__version__): @@ -239,10 +232,7 @@ def check_platformio_upgrade(): fg="yellow", nl=False, ) - if os.getenv("PLATFORMIO_IDE"): - click.secho("PlatformIO IDE Menu: Upgrade PlatformIO", fg="cyan", nl=False) - click.secho("`.", fg="yellow") - elif os.path.join("Cellar", "platformio") in fs.get_source_dir(): + if os.path.join("Cellar", "platformio") in fs.get_source_dir(): click.secho("brew update && brew upgrade", fg="cyan", nl=False) click.secho("` command.", fg="yellow") else: @@ -256,86 +246,19 @@ def check_platformio_upgrade(): click.echo("") -def check_internal_updates(ctx, what): # pylint: disable=too-many-branches - last_check = app.get_state_item("last_check", {}) - interval = int(app.get_setting("check_%s_interval" % what)) * 3600 * 24 - if (time() - interval) < last_check.get(what + "_update", 0): - return - - last_check[what + "_update"] = int(time()) - app.set_state_item("last_check", last_check) - - http.ensure_internet_on(raise_exception=True) - - outdated_items = [] - pm = PlatformPackageManager() if what == "platforms" else LibraryPackageManager() - for pkg in pm.get_installed(): - if pkg.metadata.name in outdated_items: - continue - conds = [ - pm.outdated(pkg).is_outdated(), - what == "platforms" and PlatformFactory.new(pkg).are_outdated_packages(), - ] - if any(conds): - outdated_items.append(pkg.metadata.name) - - if not outdated_items: - return - - terminal_width, _ = shutil.get_terminal_size() - - click.echo("") - click.echo("*" * terminal_width) - click.secho( - "There are the new updates for %s (%s)" 
% (what, ", ".join(outdated_items)), - fg="yellow", - ) - - if not app.get_setting("auto_update_" + what): - click.secho("Please update them via ", fg="yellow", nl=False) - click.secho( - "`platformio %s update`" - % ("lib --global" if what == "libraries" else "platform"), - fg="cyan", - nl=False, - ) - click.secho(" command.\n", fg="yellow") - click.secho( - "If you want to manually check for the new versions " - "without updating, please use ", - fg="yellow", - nl=False, - ) - click.secho( - "`platformio %s update --dry-run`" - % ("lib --global" if what == "libraries" else "platform"), - fg="cyan", - nl=False, - ) - click.secho(" command.", fg="yellow") - else: - click.secho("Please wait while updating %s ..." % what, fg="yellow") - if what == "platforms": - ctx.invoke(cmd_platform_update, platforms=outdated_items) - elif what == "libraries": - ctx.meta[CTX_META_STORAGE_DIRS_KEY] = [pm.package_dir] - ctx.invoke(cmd_lib_update, libraries=outdated_items) - click.echo() - - telemetry.send_event(category="Auto", action="Update", label=what.title()) - - click.echo("*" * terminal_width) - click.echo("") - - def check_prune_system(): - last_check = app.get_state_item("last_check", {}) interval = 30 * 3600 * 24 # 1 time per month - if (time() - interval) < last_check.get("prune_system", 0): + check_state = app.get_state_item("last_check", {}) + last_checked_time = check_state.get("prune_system", 0) + if (time() - interval) < last_checked_time: + return + + check_state["prune_system"] = int(time()) + app.set_state_item("last_check", check_state) + + if not last_checked_time: return - last_check["prune_system"] = int(time()) - app.set_state_item("last_check", last_check) threshold_mb = int(app.get_setting("check_prune_system_threshold") or 0) if threshold_mb <= 0: return diff --git a/platformio/package/commands/__init__.py b/platformio/package/commands/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/platformio/package/commands/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/platformio/package/commands/exec.py b/platformio/package/commands/exec.py new file mode 100644 index 00000000..b5484f73 --- /dev/null +++ b/platformio/package/commands/exec.py @@ -0,0 +1,105 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import subprocess + +import click + +from platformio.compat import IS_MACOS, IS_WINDOWS +from platformio.exception import ReturnErrorCode, UserSideException +from platformio.package.manager.tool import ToolPackageManager +from platformio.proc import get_pythonexe_path + + +@click.command("exec", short_help="Run command from package tool") +@click.option("-p", "--package", metavar="SPECIFICATION") +@click.option("-c", "--call", metavar=" [args...]") +@click.argument("args", nargs=-1, type=click.UNPROCESSED) +@click.pass_obj +def package_exec_cmd(obj, package, call, args): + if not call and not args: + raise click.BadArgumentUsage("Please provide command name") + pkg = None + if package: + pm = ToolPackageManager() + pkg = pm.get_package(package) + if not pkg: + pkg = pm.install(package) + else: + executable = args[0] if args else call.split(" ")[0] + pkg = find_pkg_by_executable(executable) + if not pkg: + raise UserSideException( + "Could not find a package with '%s' executable file" % executable + ) + + click.echo( + "Using %s package" + % click.style("%s@%s" % (pkg.metadata.name, pkg.metadata.version), fg="cyan") + ) + + inject_pkg_to_environ(pkg) + os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path() + # inject current python interpreter on Windows + if IS_WINDOWS and args and args[0].endswith(".py"): + args = [os.environ["PIO_PYTHON_EXE"]] + list(args) + result = None + try: + run_options = dict(shell=call is not None, env=os.environ) + force_click_stream = (obj or {}).get("force_click_stream") + if force_click_stream: + run_options.update(stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + result = subprocess.run( # pylint: disable=subprocess-run-check + call or args, **run_options + ) + if force_click_stream: + click.echo(result.stdout.decode().strip(), err=result.returncode != 0) + except Exception as exc: + raise UserSideException(exc) + + if result and result.returncode != 0: + raise ReturnErrorCode(result.returncode) + + +def find_pkg_by_executable(executable): + exes = [executable] + if IS_WINDOWS and not executable.endswith(".exe"): + exes.append(f"{executable}.exe") + for pkg in ToolPackageManager().get_installed(): + for exe in exes: + if os.path.exists(os.path.join(pkg.path, exe)) or os.path.exists( + os.path.join(pkg.path, "bin", exe) + ): + return pkg + return None + + +def inject_pkg_to_environ(pkg): + bin_dir = os.path.join(pkg.path, "bin") + lib_dir = os.path.join(pkg.path, "lib") + + paths = [bin_dir, pkg.path] if os.path.isdir(bin_dir) else [pkg.path] + if os.environ.get("PATH"): + paths.append(os.environ.get("PATH")) + os.environ["PATH"] = os.pathsep.join(paths) + + if IS_WINDOWS or not os.path.isdir(lib_dir) or "toolchain" in pkg.metadata.name: + return + + lib_path_key = "DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH" + lib_paths = [lib_dir] + if os.environ.get(lib_path_key): + lib_paths.append(os.environ.get(lib_path_key)) + os.environ[lib_path_key] = os.pathsep.join(lib_paths) diff --git a/platformio/package/commands/install.py b/platformio/package/commands/install.py new file mode 100644 index 00000000..472d0ae8 --- /dev/null +++ b/platformio/package/commands/install.py @@ -0,0 +1,316 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import os +from pathlib import Path + +import click + +from platformio import fs +from platformio.package.exception import UnknownPackageError +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.manager.tool import ToolPackageManager +from platformio.package.meta import PackageSpec +from platformio.project.config import ProjectConfig +from platformio.project.savedeps import pkg_to_save_spec, save_project_dependencies +from platformio.test.result import TestSuite +from platformio.test.runners.factory import TestRunnerFactory + + +@click.command( + "install", short_help="Install the project dependencies or custom packages" +) +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option("-e", "--environment", "environments", multiple=True) +@click.option("-p", "--platform", "platforms", metavar="SPECIFICATION", multiple=True) +@click.option("-t", "--tool", "tools", metavar="SPECIFICATION", multiple=True) +@click.option("-l", "--library", "libraries", metavar="SPECIFICATION", multiple=True) +@click.option( + "--no-save", + is_flag=True, + help="Prevent saving specified packages to `platformio.ini`", +) +@click.option("--skip-dependencies", is_flag=True, help="Skip package dependencies") +@click.option("-g", "--global", is_flag=True, help="Install package globally") +@click.option( + "--storage-dir", + default=None, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), + help="Custom Package Manager storage for global packages", +) +@click.option("-f", "--force", is_flag=True, help="Reinstall package if it exists") +@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting") +def package_install_cmd(**options): + if options.get("global"): + install_global_dependencies(options) + else: + install_project_dependencies(options) + + +def install_global_dependencies(options): + pm = PlatformPackageManager(options.get("storage_dir")) + tm = ToolPackageManager(options.get("storage_dir")) + lm = LibraryPackageManager(options.get("storage_dir")) + for obj in (pm, tm, lm): + obj.set_log_level(logging.WARN if options.get("silent") else logging.DEBUG) + for spec in options.get("platforms"): + pm.install( + spec, + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + for spec in options.get("tools"): + tm.install( + spec, + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + for spec in options.get("libraries", []): + lm.install( + spec, + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + + +def install_project_dependencies(options): + environments = options["environments"] + with fs.cd(options["project_dir"]): + config = ProjectConfig.get_instance() + config.validate(environments) + for env in config.envs(): + if environments and env not in environments: + continue + if not options.get("silent"): + 
click.echo( + "Resolving %s environment packages..." % click.style(env, fg="cyan") + ) + already_up_to_date = not install_project_env_dependencies(env, options) + if not options.get("silent") and already_up_to_date: + click.secho("Already up-to-date.", fg="green") + + +def install_project_env_dependencies(project_env, options=None): + """Used in `pio run` -> Processor""" + options = options or {} + installed_conds = [] + # custom platforms + if options.get("platforms"): + installed_conds.append( + _install_project_env_custom_platforms(project_env, options) + ) + # custom tools + if options.get("tools"): + installed_conds.append(_install_project_env_custom_tools(project_env, options)) + # custom ibraries + if options.get("libraries"): + installed_conds.append( + _install_project_env_custom_libraries(project_env, options) + ) + # declared dependencies + if not installed_conds: + installed_conds = [ + _install_project_env_platform(project_env, options), + _install_project_env_libraries(project_env, options), + ] + return any(installed_conds) + + +def _install_project_env_platform(project_env, options): + config = ProjectConfig.get_instance() + pm = PlatformPackageManager() + if options.get("silent"): + pm.set_log_level(logging.WARN) + spec = config.get(f"env:{project_env}", "platform") + if not spec: + return False + already_up_to_date = not options.get("force") + if not pm.get_package(spec): + already_up_to_date = False + PlatformPackageManager().install( + spec, + project_env=project_env, + project_targets=options.get("project_targets"), + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + return not already_up_to_date + + +def _install_project_env_custom_platforms(project_env, options): + already_up_to_date = not options.get("force") + pm = PlatformPackageManager() + if not options.get("silent"): + pm.set_log_level(logging.DEBUG) + for spec in options.get("platforms"): + if not pm.get_package(spec): + already_up_to_date = False + pm.install( + spec, + project_env=project_env, + project_targets=options.get("project_targets"), + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + return not already_up_to_date + + +def _install_project_env_custom_tools(project_env, options): + already_up_to_date = not options.get("force") + tm = ToolPackageManager() + if not options.get("silent"): + tm.set_log_level(logging.DEBUG) + specs_to_save = [] + for tool in options.get("tools"): + spec = PackageSpec(tool) + if not tm.get_package(spec): + already_up_to_date = False + pkg = tm.install( + spec, + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + specs_to_save.append(pkg_to_save_spec(pkg, spec)) + if not options.get("no_save") and specs_to_save: + save_project_dependencies( + os.getcwd(), + specs_to_save, + scope="platform_packages", + action="add", + environments=[project_env], + ) + return not already_up_to_date + + +def _install_project_env_libraries(project_env, options): + _uninstall_project_unused_libdeps(project_env, options) + already_up_to_date = not options.get("force") + config = ProjectConfig.get_instance() + env_lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), project_env) + ) + private_lm = LibraryPackageManager( + os.path.join(config.get("platformio", "lib_dir")) + ) + if options.get("silent"): + env_lm.set_log_level(logging.WARN) + private_lm.set_log_level(logging.WARN) + + lib_deps = config.get(f"env:{project_env}", "lib_deps") + if "__test" in 
options.get("project_targets", []): + test_runner = TestRunnerFactory.new( + TestSuite(project_env, options.get("piotest_running_name", "*")), config + ) + lib_deps.extend(test_runner.EXTRA_LIB_DEPS or []) + + for library in lib_deps: + spec = PackageSpec(library) + # skip built-in dependencies + if not spec.external and not spec.owner: + continue + if not env_lm.get_package(spec): + already_up_to_date = False + env_lm.install( + spec, + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + + # install dependencies from the private libraries + for pkg in private_lm.get_installed(): + _install_project_private_library_deps(pkg, private_lm, env_lm, options) + + return not already_up_to_date + + +def _uninstall_project_unused_libdeps(project_env, options): + config = ProjectConfig.get_instance() + lib_deps = set(config.get(f"env:{project_env}", "lib_deps")) + if not lib_deps: + return + storage_dir = Path(config.get("platformio", "libdeps_dir"), project_env) + integrity_dat = storage_dir / "integrity.dat" + if integrity_dat.is_file(): + prev_lib_deps = set( + integrity_dat.read_text(encoding="utf-8").strip().split("\n") + ) + if lib_deps == prev_lib_deps: + return + lm = LibraryPackageManager(str(storage_dir)) + if options.get("silent"): + lm.set_log_level(logging.WARN) + else: + click.secho("Removing unused dependencies...") + for spec in set(prev_lib_deps) - set(lib_deps): + try: + lm.uninstall(spec) + except UnknownPackageError: + pass + storage_dir.mkdir(parents=True, exist_ok=True) + integrity_dat.write_text("\n".join(lib_deps), encoding="utf-8") + + +def _install_project_private_library_deps(private_pkg, private_lm, env_lm, options): + for dependency in private_lm.get_pkg_dependencies(private_pkg) or []: + spec = private_lm.dependency_to_spec(dependency) + # skip built-in dependencies + if not spec.external and not spec.owner: + continue + pkg = private_lm.get_package(spec) + if not pkg and not env_lm.get_package(spec): + pkg = env_lm.install( + spec, + skip_dependencies=True, + force=options.get("force"), + ) + if not pkg: + continue + _install_project_private_library_deps(pkg, private_lm, env_lm, options) + + +def _install_project_env_custom_libraries(project_env, options): + already_up_to_date = not options.get("force") + config = ProjectConfig.get_instance() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), project_env) + ) + if not options.get("silent"): + lm.set_log_level(logging.DEBUG) + specs_to_save = [] + for library in options.get("libraries") or []: + spec = PackageSpec(library) + if not lm.get_package(spec): + already_up_to_date = False + pkg = lm.install( + spec, + skip_dependencies=options.get("skip_dependencies"), + force=options.get("force"), + ) + specs_to_save.append(pkg_to_save_spec(pkg, spec)) + if not options.get("no_save") and specs_to_save: + save_project_dependencies( + os.getcwd(), + specs_to_save, + scope="lib_deps", + action="add", + environments=[project_env], + ) + return not already_up_to_date diff --git a/platformio/package/commands/list.py b/platformio/package/commands/list.py new file mode 100644 index 00000000..578ecd20 --- /dev/null +++ b/platformio/package/commands/list.py @@ -0,0 +1,221 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from typing import List + +import click + +from platformio import fs +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.manager.tool import ToolPackageManager +from platformio.package.meta import PackageItem, PackageSpec +from platformio.platform.factory import PlatformFactory +from platformio.project.config import ProjectConfig + + +@click.command("list", short_help="List installed packages") +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option("-e", "--environment", "environments", multiple=True) +@click.option("-p", "--platform", "platforms", metavar="SPECIFICATION", multiple=True) +@click.option("-t", "--tool", "tools", metavar="SPECIFICATION", multiple=True) +@click.option("-l", "--library", "libraries", metavar="SPECIFICATION", multiple=True) +@click.option("-g", "--global", is_flag=True, help="List globally installed packages") +@click.option( + "--storage-dir", + default=None, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), + help="Custom Package Manager storage for global packages", +) +@click.option("--only-platforms", is_flag=True, help="List only platform packages") +@click.option("--only-tools", is_flag=True, help="List only tool packages") +@click.option("--only-libraries", is_flag=True, help="List only library packages") +@click.option("-v", "--verbose", is_flag=True) +def package_list_cmd(**options): + if options.get("global"): + list_global_packages(options) + else: + list_project_packages(options) + + +def humanize_package(pkg, spec=None, verbose=False): + if spec and not isinstance(spec, PackageSpec): + spec = PackageSpec(spec) + data = [ + click.style("{name} @ {version}".format(**pkg.metadata.as_dict()), fg="cyan") + ] + extra_data = ["required: %s" % (spec.humanize() if spec else "Any")] + if verbose: + extra_data.append(pkg.path) + data.append("(%s)" % ", ".join(extra_data)) + return " ".join(data) + + +def print_dependency_tree(pm, specs=None, filter_specs=None, level=0, verbose=False): + filtered_pkgs = [ + pm.get_package(spec) for spec in filter_specs or [] if pm.get_package(spec) + ] + candidates = {} + if specs: + for spec in specs: + pkg = pm.get_package(spec) + if not pkg: + continue + candidates[pkg.path] = (pkg, spec) + else: + candidates = {pkg.path: (pkg, pkg.metadata.spec) for pkg in pm.get_installed()} + if not candidates: + return + candidates = sorted(candidates.values(), key=lambda item: item[0].metadata.name) + for index, (pkg, spec) in enumerate(candidates): + if filtered_pkgs and not _pkg_tree_contains(pm, pkg, filtered_pkgs): + continue + dependencies = pm.get_pkg_dependencies(pkg) + click.echo( + "%s%s %s" + % ( + "│ " * level, + "├──" if index < len(candidates) - 1 else "└──", + humanize_package( + pkg, + spec=spec, + verbose=verbose, + ), + ) + ) + if dependencies: + print_dependency_tree( + pm, + specs=[pm.dependency_to_spec(item) for item in dependencies], + 
filter_specs=filter_specs, + level=level + 1, + verbose=verbose, + ) + + +def _pkg_tree_contains(pm, root: PackageItem, children: List[PackageItem]): + if root in children: + return True + for dependency in pm.get_pkg_dependencies(root) or []: + pkg = pm.get_package(pm.dependency_to_spec(dependency)) + if pkg and _pkg_tree_contains(pm, pkg, children): + return True + return False + + +def list_global_packages(options): + data = [ + ("platforms", PlatformPackageManager(options.get("storage_dir"))), + ("tools", ToolPackageManager(options.get("storage_dir"))), + ("libraries", LibraryPackageManager(options.get("storage_dir"))), + ] + only_packages = any( + options.get(type_) or options.get(f"only_{type_}") for (type_, _) in data + ) + for (type_, pm) in data: + skip_conds = [ + only_packages + and not options.get(type_) + and not options.get(f"only_{type_}"), + not pm.get_installed(), + ] + if any(skip_conds): + continue + click.secho(type_.capitalize(), bold=True) + print_dependency_tree( + pm, filter_specs=options.get(type_), verbose=options.get("verbose") + ) + click.echo() + + +def list_project_packages(options): + environments = options["environments"] + only_packages = any( + options.get(type_) or options.get(f"only_{type_}") + for type_ in ("platforms", "tools", "libraries") + ) + only_platform_packages = any( + options.get(type_) or options.get(f"only_{type_}") + for type_ in ("platforms", "tools") + ) + only_library_packages = options.get("libraries") or options.get("only_libraries") + + with fs.cd(options["project_dir"]): + config = ProjectConfig.get_instance() + config.validate(environments) + for env in config.envs(): + if environments and env not in environments: + continue + click.echo( + "Resolving %s environment packages..." % click.style(env, fg="cyan") + ) + found = False + if not only_packages or only_platform_packages: + _found = print_project_env_platform_packages(env, options) + found = found or _found + if not only_packages or only_library_packages: + _found = print_project_env_library_packages(env, options) + found = found or _found + if not found: + click.echo("No packages") + if (not environments and len(config.envs()) > 1) or len(environments) > 1: + click.echo() + + +def print_project_env_platform_packages(project_env, options): + config = ProjectConfig.get_instance() + platform = config.get(f"env:{project_env}", "platform") + if not platform: + return None + pkg = PlatformPackageManager().get_package(platform) + if not pkg: + return None + click.echo( + "Platform %s" + % (humanize_package(pkg, platform, verbose=options.get("verbose"))) + ) + p = PlatformFactory.new(pkg) + if project_env: + p.configure_project_packages(project_env) + print_dependency_tree( + p.pm, + specs=[p.get_package_spec(name) for name in p.packages], + filter_specs=options.get("tools"), + ) + click.echo() + return True + + +def print_project_env_library_packages(project_env, options): + config = ProjectConfig.get_instance() + lib_deps = config.get(f"env:{project_env}", "lib_deps") + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), project_env) + ) + if not lib_deps or not lm.get_installed(): + return None + click.echo("Libraries") + print_dependency_tree( + lm, + lib_deps, + filter_specs=options.get("libraries"), + verbose=options.get("verbose"), + ) + return True diff --git a/platformio/package/commands/outdated.py b/platformio/package/commands/outdated.py new file mode 100644 index 00000000..5a8c1ea4 --- /dev/null +++ 
b/platformio/package/commands/outdated.py @@ -0,0 +1,220 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import click +from tabulate import tabulate + +from platformio import fs +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.meta import PackageSpec +from platformio.platform.factory import PlatformFactory +from platformio.project.config import ProjectConfig + + +class OutdatedCandidate: + def __init__(self, pm, pkg, spec, envs=None): + self.pm = pm + self.pkg = pkg + self.spec = spec + self.envs = envs or [] + self.outdated = None + if not isinstance(self.envs, list): + self.envs = [self.envs] + + def __eq__(self, other): + return all( + [ + self.pm.package_dir == other.pm.package_dir, + self.pkg == other.pkg, + self.spec == other.spec, + ] + ) + + def check(self): + self.outdated = self.pm.outdated(self.pkg, self.spec) + + def is_outdated(self): + if not self.outdated: + self.check() + return self.outdated.is_outdated(allow_incompatible=self.pm.pkg_type != "tool") + + +@click.command("outdated", short_help="Check for outdated packages") +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option("-e", "--environment", "environments", multiple=True) +def package_outdated_cmd(project_dir, environments): + candidates = fetch_outdated_candidates( + project_dir, environments, with_progress=True + ) + print_outdated_candidates(candidates) + + +def print_outdated_candidates(candidates): + if not candidates: + click.secho("Everything is up-to-date!", fg="green") + return + tabulate_data = [ + ( + click.style( + candidate.pkg.metadata.name, + fg=get_candidate_update_color(candidate.outdated), + ), + candidate.outdated.current, + candidate.outdated.wanted, + click.style(candidate.outdated.latest, fg="cyan"), + candidate.pm.pkg_type.capitalize(), + ", ".join(set(candidate.envs)), + ) + for candidate in candidates + ] + click.echo() + click.secho("Semantic Versioning color legend:", bold=True) + click.echo( + tabulate( + [ + ( + click.style("", fg="red"), + "backward-incompatible updates", + ), + ( + click.style("", fg="yellow"), + "backward-compatible features", + ), + ( + click.style("", fg="green"), + "backward-compatible bug fixes", + ), + ], + tablefmt="plain", + ) + ) + click.echo() + click.echo( + tabulate( + tabulate_data, + headers=["Package", "Current", "Wanted", "Latest", "Type", "Environments"], + ) + ) + + +def get_candidate_update_color(outdated): + if outdated.update_increment_type == outdated.UPDATE_INCREMENT_MAJOR: + return "red" + if outdated.update_increment_type == outdated.UPDATE_INCREMENT_MINOR: + return "yellow" + if outdated.update_increment_type == outdated.UPDATE_INCREMENT_PATCH: + return "green" + return None + + +def fetch_outdated_candidates(project_dir, environments, with_progress=False): + 
candidates = [] + + def _add_candidate(data): + new_candidate = OutdatedCandidate( + data["pm"], data["pkg"], data["spec"], data["env"] + ) + for candidate in candidates: + if candidate == new_candidate: + candidate.envs.append(data["env"]) + return + candidates.append(new_candidate) + + with fs.cd(project_dir): + config = ProjectConfig.get_instance() + config.validate(environments) + + # platforms + for item in find_platform_candidates(config, environments): + _add_candidate(item) + # platform package dependencies + for dep_item in find_platform_dependency_candidates(item): + _add_candidate(dep_item) + + # libraries + for item in find_library_candidates(config, environments): + _add_candidate(item) + + result = [] + if not with_progress: + for candidate in candidates: + if candidate.is_outdated(): + result.append(candidate) + return result + + with click.progressbar(candidates, label="Checking") as pb: + for candidate in pb: + if candidate.is_outdated(): + result.append(candidate) + return result + + +def find_platform_candidates(config, environments): + result = [] + pm = PlatformPackageManager() + for env in config.envs(): + platform = config.get(f"env:{env}", "platform") + if not platform or (environments and env not in environments): + continue + spec = PackageSpec(platform) + pkg = pm.get_package(spec) + if not pkg: + continue + result.append(dict(env=env, pm=pm, pkg=pkg, spec=spec)) + return result + + +def find_platform_dependency_candidates(platform_candidate): + result = [] + p = PlatformFactory.new(platform_candidate["spec"]) + p.configure_project_packages(platform_candidate["env"]) + for pkg in p.get_installed_packages(): + result.append( + dict( + env=platform_candidate["env"], + pm=p.pm, + pkg=pkg, + spec=p.get_package_spec(pkg.metadata.name), + ) + ) + return sorted(result, key=lambda item: item["pkg"].metadata.name) + + +def find_library_candidates(config, environments): + result = [] + for env in config.envs(): + if environments and env not in environments: + continue + package_dir = os.path.join(config.get("platformio", "libdeps_dir") or "", env) + lib_deps = [ + item for item in config.get(f"env:{env}", "lib_deps", []) if "/" in item + ] + if not os.path.isdir(package_dir) or not lib_deps: + continue + pm = LibraryPackageManager(package_dir) + for lib in lib_deps: + spec = PackageSpec(lib) + pkg = pm.get_package(spec) + if not pkg: + continue + result.append(dict(env=env, pm=pm, pkg=pkg, spec=spec)) + return sorted(result, key=lambda item: item["pkg"].metadata.name) diff --git a/platformio/package/commands/pack.py b/platformio/package/commands/pack.py new file mode 100644 index 00000000..c80995e4 --- /dev/null +++ b/platformio/package/commands/pack.py @@ -0,0 +1,45 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
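+
+# "pio pkg pack" implementation (descriptive comment, based on the code below):
+# create a tarball from the given package source with PackagePacker, then
+# validate the manifest extracted from the resulting archive; if validation
+# fails, the broken archive is removed and the error is re-raised.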
+ +import os + +import click + +from platformio.package.manifest.parser import ManifestParserFactory +from platformio.package.manifest.schema import ManifestSchema, ManifestValidationError +from platformio.package.pack import PackagePacker + + +@click.command("pack", short_help="Create a tarball from a package") +@click.argument( + "package", + default=os.getcwd, + metavar="", + type=click.Path(exists=True, file_okay=True, dir_okay=True, resolve_path=True), +) +@click.option( + "-o", "--output", help="A destination path (folder or a full path to file)" +) +def package_pack_cmd(package, output): + p = PackagePacker(package) + archive_path = p.pack(output) + # validate manifest + try: + ManifestSchema().load_manifest( + ManifestParserFactory.new_from_archive(archive_path).as_dict() + ) + except ManifestValidationError as e: + os.remove(archive_path) + raise e + click.secho('Wrote a tarball to "%s"' % archive_path, fg="green") diff --git a/platformio/commands/package.py b/platformio/package/commands/publish.py similarity index 69% rename from platformio/commands/package.py rename to platformio/package/commands/publish.py index 3e3bdaa6..150fdb19 100644 --- a/platformio/commands/package.py +++ b/platformio/package/commands/publish.py @@ -13,6 +13,7 @@ # limitations under the License. import os +import tarfile import tempfile from datetime import datetime @@ -24,8 +25,8 @@ from platformio.clients.account import AccountClient from platformio.clients.registry import RegistryClient from platformio.exception import UserSideException from platformio.package.manifest.parser import ManifestParserFactory -from platformio.package.manifest.schema import ManifestSchema, ManifestValidationError -from platformio.package.meta import PackageSpec, PackageType +from platformio.package.manifest.schema import ManifestSchema +from platformio.package.meta import PackageType from platformio.package.pack import PackagePacker from platformio.package.unpack import FileUnpacker, TARArchiver @@ -40,10 +41,119 @@ def validate_datetime(ctx, param, value): # pylint: disable=unused-argument return value -def load_manifest_from_archive(path): - return ManifestSchema().load_manifest( - ManifestParserFactory.new_from_archive(path).as_dict() +@click.command("publish", short_help="Publish a package to the registry") +@click.argument( + "package", + default=os.getcwd, + metavar="", + type=click.Path(exists=True, file_okay=True, dir_okay=True, resolve_path=True), +) +@click.option( + "--owner", + help="PIO Account username (can be organization username). 
" + "Default is set to a username of the authorized PIO Account", +) +@click.option( + "--type", + "type_", + type=click.Choice(list(PackageType.items().values())), + help="Custom package type", +) +@click.option( + "--released-at", + callback=validate_datetime, + help="Custom release date and time in the next format (UTC): 2014-06-13 17:08:52", +) +@click.option("--private", is_flag=True, help="Restricted access (not a public)") +@click.option( + "--notify/--no-notify", + default=True, + help="Notify by email when package is processed", +) +@click.option( + "--non-interactive", + is_flag=True, + help="Do not show interactive prompt", +) +def package_publish_cmd( # pylint: disable=too-many-arguments, too-many-locals + package, owner, type_, released_at, private, notify, non_interactive +): + click.secho("Preparing a package...", fg="cyan") + owner = owner or AccountClient().get_logged_username() + do_not_pack = ( + not os.path.isdir(package) + and isinstance(FileUnpacker.new_archiver(package), TARArchiver) + and PackageType.from_archive(package) ) + archive_path = None + with tempfile.TemporaryDirectory() as tmp_dir: # pylint: disable=no-member + # publish .tar.gz instantly without repacking + if do_not_pack: + archive_path = package + else: + with fs.cd(tmp_dir): + p = PackagePacker(package) + archive_path = p.pack() + + type_ = type_ or PackageType.from_archive(archive_path) + manifest = ManifestSchema().load_manifest( + ManifestParserFactory.new_from_archive(archive_path).as_dict() + ) + name = manifest.get("name") + version = manifest.get("version") + data = [ + ("Type:", type_), + ("Owner:", owner), + ("Name:", name), + ("Version:", version), + ("Size:", fs.humanize_file_size(os.path.getsize(archive_path))), + ] + if manifest.get("system"): + data.insert(len(data) - 1, ("System:", ", ".join(manifest.get("system")))) + click.echo(tabulate(data, tablefmt="plain")) + + # check files containing non-ascii chars + check_archive_file_names(archive_path) + + # look for duplicates + check_package_duplicates(owner, type_, name, version, manifest.get("system")) + + if not non_interactive: + click.confirm( + "Are you sure you want to publish the %s %s to the registry?\n" + % ( + type_, + click.style( + "%s/%s@%s" % (owner, name, version), + fg="cyan", + ), + ), + abort=True, + ) + + click.secho( + "The package publishing may take some time depending " + "on your Internet connection and the package size.", + fg="yellow", + ) + click.echo("Publishing...") + response = RegistryClient().publish_package( + owner, type_, archive_path, released_at, private, notify + ) + if not do_not_pack: + os.remove(archive_path) + click.secho(response.get("message"), fg="green") + + +def check_archive_file_names(archive_path): + with tarfile.open(archive_path, mode="r:gz") as tf: + for name in tf.getnames(): + if not name.isascii(): + click.secho( + f"Warning! 
The `{name}` file contains non-ASCII chars and can " + "lead to the unpacking issues on a user machine", + fg="yellow", + ) def check_package_duplicates( @@ -52,7 +162,7 @@ def check_package_duplicates( found = False items = ( RegistryClient() - .list_packages(filters=dict(types=[type], names=[name])) + .list_packages(qualifiers=dict(types=[type], names=[name])) .get("items") ) if not items: @@ -86,147 +196,3 @@ def check_package_duplicates( fg="yellow", ) return True - - -@click.group("package", short_help="Package manager") -def cli(): - pass - - -@cli.command("pack", short_help="Create a tarball from a package") -@click.argument( - "package", - required=True, - default=os.getcwd, - metavar="", -) -@click.option( - "-o", "--output", help="A destination path (folder or a full path to file)" -) -def package_pack(package, output): - p = PackagePacker(package) - archive_path = p.pack(output) - # validate manifest - try: - load_manifest_from_archive(archive_path) - except ManifestValidationError as e: - os.remove(archive_path) - raise e - click.secho('Wrote a tarball to "%s"' % archive_path, fg="green") - - -@cli.command("publish", short_help="Publish a package to the registry") -@click.argument( - "package", - required=True, - default=os.getcwd, - metavar="", -) -@click.option( - "--owner", - help="PIO Account username (can be organization username). " - "Default is set to a username of the authorized PIO Account", -) -@click.option( - "--released-at", - callback=validate_datetime, - help="Custom release date and time in the next format (UTC): 2014-06-13 17:08:52", -) -@click.option("--private", is_flag=True, help="Restricted access (not a public)") -@click.option( - "--notify/--no-notify", - default=True, - help="Notify by email when package is processed", -) -@click.option( - "--non-interactive", - is_flag=True, - help="Do not show interactive prompt", -) -def package_publish( # pylint: disable=too-many-arguments, too-many-locals - package, owner, released_at, private, notify, non_interactive -): - click.secho("Preparing a package...", fg="cyan") - owner = owner or AccountClient().get_logged_username() - do_not_pack = not os.path.isdir(package) and isinstance( - FileUnpacker.new_archiver(package), TARArchiver - ) - archive_path = None - with tempfile.TemporaryDirectory() as tmp_dir: # pylint: disable=no-member - # publish .tar.gz instantly without repacking - if do_not_pack: - archive_path = package - else: - with fs.cd(tmp_dir): - p = PackagePacker(package) - archive_path = p.pack() - - type_ = PackageType.from_archive(archive_path) - manifest = load_manifest_from_archive(archive_path) - name = manifest.get("name") - version = manifest.get("version") - data = [ - ("Type:", type_), - ("Owner:", owner), - ("Name:", name), - ("Version:", version), - ] - if manifest.get("system"): - data.insert(len(data) - 1, ("System:", ", ".join(manifest.get("system")))) - click.echo(tabulate(data, tablefmt="plain")) - - # look for duplicates - check_package_duplicates(owner, type_, name, version, manifest.get("system")) - - if not non_interactive: - click.confirm( - "Are you sure you want to publish the %s %s to the registry?\n" - % ( - type_, - click.style( - "%s/%s@%s" % (owner, name, version), - fg="cyan", - ), - ), - abort=True, - ) - - click.secho( - "The package publishing may take some time depending " - "on your Internet connection and the package size.", - fg="yellow", - ) - click.echo("Publishing...") - response = RegistryClient().publish_package( - owner, type_, archive_path, released_at, 
private, notify - ) - if not do_not_pack: - os.remove(archive_path) - click.secho(response.get("message"), fg="green") - - -@cli.command("unpublish", short_help="Remove a pushed package from the registry") -@click.argument( - "package", required=True, metavar="[/][@]" -) -@click.option( - "--type", - type=click.Choice(list(PackageType.items().values())), - default="library", - help="Package type, default is set to `library`", -) -@click.option( - "--undo", - is_flag=True, - help="Undo a remove, putting a version back into the registry", -) -def package_unpublish(package, type, undo): # pylint: disable=redefined-builtin - spec = PackageSpec(package) - response = RegistryClient().unpublish_package( - owner=spec.owner or AccountClient().get_logged_username(), - type=type, - name=spec.name, - version=str(spec.requirements), - undo=undo, - ) - click.secho(response.get("message"), fg="green") diff --git a/platformio/package/commands/search.py b/platformio/package/commands/search.py new file mode 100644 index 00000000..57ec76ec --- /dev/null +++ b/platformio/package/commands/search.py @@ -0,0 +1,77 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math + +import click + +from platformio import util +from platformio.clients.registry import RegistryClient + + +@click.command("search", short_help="Search for packages") +@click.argument("query") +@click.option("-p", "--page", type=click.IntRange(min=1)) +@click.option( + "-s", + "--sort", + type=click.Choice(["relevance", "popularity", "trending", "added", "updated"]), +) +def package_search_cmd(query, page, sort): + client = RegistryClient() + result = client.list_packages(query, page=page, sort=sort) + if not result["total"]: + click.secho("Nothing has been found by your request", fg="yellow") + click.echo( + "Try a less-specific search or use truncation (or wildcard) operator *" + ) + return + print_search_result(result) + + +def print_search_result(result): + click.echo( + "Found %d packages (page %d of %d)" + % ( + result["total"], + result["page"], + math.ceil(result["total"] / result["limit"]), + ) + ) + for item in result["items"]: + click.echo() + print_search_item(item) + + +def print_search_item(item): + click.echo( + "%s/%s" + % ( + click.style(item["owner"]["username"], fg="cyan"), + click.style(item["name"], fg="cyan", bold=True), + ) + ) + click.echo( + "%s • %s • Published on %s" + % ( + item["type"].capitalize() + if item["tier"] == "community" + else click.style( + ("%s %s" % (item["tier"], item["type"])).title(), bold=True + ), + item["version"]["name"], + util.parse_datetime(item["version"]["released_at"]).strftime("%c"), + ) + ) + click.echo(item["description"]) diff --git a/platformio/package/commands/show.py b/platformio/package/commands/show.py new file mode 100644 index 00000000..ce50bc88 --- /dev/null +++ b/platformio/package/commands/show.py @@ -0,0 +1,148 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from urllib.parse import quote + +import click +from tabulate import tabulate + +from platformio import fs, util +from platformio.clients.registry import RegistryClient +from platformio.exception import UserSideException +from platformio.package.manager._registry import PackageManagerRegistryMixin +from platformio.package.meta import PackageSpec, PackageType + + +@click.command("show", short_help="Show package information") +@click.argument("spec", metavar="[/][@]") +@click.option( + "-t", + "--type", + "pkg_type", + type=click.Choice(list(PackageType.items().values())), + help="Package type", +) +def package_show_cmd(spec, pkg_type): + spec = PackageSpec(spec) + data = fetch_package_data(spec, pkg_type) + if not data: + raise UserSideException( + "Could not find '%s' package in the PlatormIO Registry" % spec.humanize() + ) + + click.echo() + click.echo( + "%s/%s" + % ( + click.style(data["owner"]["username"], fg="cyan"), + click.style(data["name"], fg="cyan", bold=True), + ) + ) + click.echo( + "%s • %s • %s • Published on %s" + % ( + data["type"].capitalize(), + data["version"]["name"], + "Private" if data.get("private") else "Public", + util.parse_datetime(data["version"]["released_at"]).strftime("%c"), + ) + ) + + click.echo() + type_plural = "libraries" if data["type"] == "library" else (data["type"] + "s") + click.secho( + "https://registry.platformio.org/%s/%s/%s" + % (type_plural, data["owner"]["username"], quote(data["name"])), + fg="blue", + ) + + # Description + click.echo() + click.echo(data["description"]) + + # Extra info + click.echo() + fields = [ + ("homepage", "Homepage"), + ("repository_url", "Repository"), + ("license", "License"), + ("popularity_rank", "Popularity"), + ("stars_count", "Stars"), + ("examples_count", "Examples"), + ("version.unpacked_size", "Installed Size"), + ("dependents_count", "Used By"), + ("dependencies_count", "Dependencies"), + ("platforms", "Compatible Platforms"), + ("frameworks", "Compatible Frameworks"), + ("keywords", "Keywords"), + ] + extra = [] + for key, title in fields: + if "." 
in key: + k1, k2 = key.split(".") + value = data.get(k1, {}).get(k2) + else: + value = data.get(key) + if not value: + continue + if isinstance(value, list): + value = ", ".join(value) + elif key.endswith("_size"): + value = fs.humanize_file_size(value) + extra.append((title, value)) + click.echo(tabulate(extra)) + + # Versions + click.echo("") + table = tabulate( + [ + ( + version["name"], + fs.humanize_file_size(max(f["size"] for f in version["files"])), + util.parse_datetime(version["released_at"]), + ) + for version in data["versions"] + ], + headers=["Version", "Size", "Published"], + ) + click.echo(table) + click.echo("") + + +def fetch_package_data(spec, pkg_type=None): + assert isinstance(spec, PackageSpec) + client = RegistryClient() + if pkg_type and spec.owner and spec.name: + return client.get_package( + pkg_type, spec.owner, spec.name, version=spec.requirements + ) + qualifiers = dict(names=spec.name.lower()) + if pkg_type: + qualifiers["types"] = pkg_type + if spec.owner: + qualifiers["owners"] = spec.owner.lower() + packages = client.list_packages(qualifiers=qualifiers)["items"] + if not packages: + return None + if len(packages) > 1: + PackageManagerRegistryMixin.print_multi_package_issue( + click.echo, packages, spec + ) + return None + return client.get_package( + packages[0]["type"], + packages[0]["owner"]["username"], + packages[0]["name"], + version=spec.requirements, + ) diff --git a/platformio/package/commands/uninstall.py b/platformio/package/commands/uninstall.py new file mode 100644 index 00000000..5fff8160 --- /dev/null +++ b/platformio/package/commands/uninstall.py @@ -0,0 +1,239 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
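+
+# "pio pkg uninstall" implementation (descriptive comment, based on the code
+# below): with "--global", remove packages from the global storage via the
+# platform/tool/library managers; otherwise walk the project environments and
+# remove either the packages passed on the command line or the dependencies
+# declared in "platformio.ini", updating the configuration file via
+# save_project_dependencies() unless "--no-save" is specified.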
+ +import logging +import os + +import click + +from platformio import fs +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.manager.tool import ToolPackageManager +from platformio.package.meta import PackageSpec +from platformio.project.config import ProjectConfig +from platformio.project.savedeps import pkg_to_save_spec, save_project_dependencies + + +@click.command( + "uninstall", short_help="Uninstall the project dependencies or custom packages" +) +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option("-e", "--environment", "environments", multiple=True) +@click.option("-p", "--platform", "platforms", metavar="SPECIFICATION", multiple=True) +@click.option("-t", "--tool", "tools", metavar="SPECIFICATION", multiple=True) +@click.option("-l", "--library", "libraries", metavar="SPECIFICATION", multiple=True) +@click.option( + "--no-save", + is_flag=True, + help="Prevent removing specified packages from `platformio.ini`", +) +@click.option("--skip-dependencies", is_flag=True, help="Skip package dependencies") +@click.option("-g", "--global", is_flag=True, help="Uninstall global packages") +@click.option( + "--storage-dir", + default=None, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), + help="Custom Package Manager storage for global packages", +) +@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting") +def package_uninstall_cmd(**options): + if options.get("global"): + uninstall_global_dependencies(options) + else: + uninstall_project_dependencies(options) + + +def uninstall_global_dependencies(options): + pm = PlatformPackageManager(options.get("storage_dir")) + tm = ToolPackageManager(options.get("storage_dir")) + lm = LibraryPackageManager(options.get("storage_dir")) + for obj in (pm, tm, lm): + obj.set_log_level(logging.WARN if options.get("silent") else logging.DEBUG) + for spec in options.get("platforms"): + pm.uninstall( + spec, + skip_dependencies=options.get("skip_dependencies"), + ) + for spec in options.get("tools"): + tm.uninstall( + spec, + skip_dependencies=options.get("skip_dependencies"), + ) + for spec in options.get("libraries", []): + lm.uninstall( + spec, + skip_dependencies=options.get("skip_dependencies"), + ) + + +def uninstall_project_dependencies(options): + environments = options["environments"] + with fs.cd(options["project_dir"]): + config = ProjectConfig.get_instance() + config.validate(environments) + for env in config.envs(): + if environments and env not in environments: + continue + if not options["silent"]: + click.echo( + "Resolving %s environment packages..." 
% click.style(env, fg="cyan") + ) + already_up_to_date = not uninstall_project_env_dependencies(env, options) + if not options["silent"] and already_up_to_date: + click.secho("Already up-to-date.", fg="green") + + +def uninstall_project_env_dependencies(project_env, options=None): + options = options or {} + uninstalled_conds = [] + # custom platforms + if options.get("platforms"): + uninstalled_conds.append( + _uninstall_project_env_custom_platforms(project_env, options) + ) + # custom tools + if options.get("tools"): + uninstalled_conds.append( + _uninstall_project_env_custom_tools(project_env, options) + ) + # custom ibraries + if options.get("libraries"): + uninstalled_conds.append( + _uninstall_project_env_custom_libraries(project_env, options) + ) + # declared dependencies + if not uninstalled_conds: + uninstalled_conds = [ + _uninstall_project_env_platform(project_env, options), + _uninstall_project_env_libraries(project_env, options), + ] + return any(uninstalled_conds) + + +def _uninstall_project_env_platform(project_env, options): + config = ProjectConfig.get_instance() + pm = PlatformPackageManager() + if options.get("silent"): + pm.set_log_level(logging.WARN) + spec = config.get(f"env:{project_env}", "platform") + if not spec: + return None + already_up_to_date = True + if not pm.get_package(spec): + return None + PlatformPackageManager().uninstall( + spec, + project_env=project_env, + skip_dependencies=options.get("skip_dependencies"), + ) + return not already_up_to_date + + +def _uninstall_project_env_custom_platforms(project_env, options): + already_up_to_date = True + pm = PlatformPackageManager() + if not options.get("silent"): + pm.set_log_level(logging.DEBUG) + for spec in options.get("platforms"): + if pm.get_package(spec): + already_up_to_date = False + pm.uninstall( + spec, + project_env=project_env, + skip_dependencies=options.get("skip_dependencies"), + ) + return not already_up_to_date + + +def _uninstall_project_env_custom_tools(project_env, options): + already_up_to_date = True + tm = ToolPackageManager() + if not options.get("silent"): + tm.set_log_level(logging.DEBUG) + specs_to_save = [] + for tool in options.get("tools"): + spec = PackageSpec(tool) + if tm.get_package(spec): + already_up_to_date = False + pkg = tm.uninstall( + spec, + skip_dependencies=options.get("skip_dependencies"), + ) + specs_to_save.append(pkg_to_save_spec(pkg, spec)) + if not options.get("no_save") and specs_to_save: + save_project_dependencies( + os.getcwd(), + specs_to_save, + scope="platform_packages", + action="remove", + environments=[project_env], + ) + return not already_up_to_date + + +def _uninstall_project_env_libraries(project_env, options): + already_up_to_date = True + config = ProjectConfig.get_instance() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), project_env) + ) + if options.get("silent"): + lm.set_log_level(logging.WARN) + for library in config.get(f"env:{project_env}", "lib_deps"): + spec = PackageSpec(library) + # skip built-in dependencies + if not spec.external and not spec.owner: + continue + if lm.get_package(spec): + already_up_to_date = False + lm.uninstall( + spec, + skip_dependencies=options.get("skip_dependencies"), + ) + return not already_up_to_date + + +def _uninstall_project_env_custom_libraries(project_env, options): + already_up_to_date = True + config = ProjectConfig.get_instance() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), project_env) + ) + if not 
options.get("silent"): + lm.set_log_level(logging.DEBUG) + specs_to_save = [] + for library in options.get("libraries") or []: + spec = PackageSpec(library) + if lm.get_package(spec): + already_up_to_date = False + pkg = lm.uninstall( + spec, + skip_dependencies=options.get("skip_dependencies"), + ) + specs_to_save.append(pkg_to_save_spec(pkg, spec)) + if not options.get("no_save") and specs_to_save: + save_project_dependencies( + os.getcwd(), + specs_to_save, + scope="lib_deps", + action="remove", + environments=[project_env], + ) + return not already_up_to_date diff --git a/platformio/package/commands/unpublish.py b/platformio/package/commands/unpublish.py new file mode 100644 index 00000000..3185144e --- /dev/null +++ b/platformio/package/commands/unpublish.py @@ -0,0 +1,46 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import click + +from platformio.clients.account import AccountClient +from platformio.clients.registry import RegistryClient +from platformio.package.meta import PackageSpec, PackageType + + +@click.command("unpublish", short_help="Remove a pushed package from the registry") +@click.argument( + "package", required=True, metavar="[/][@]" +) +@click.option( + "--type", + type=click.Choice(list(PackageType.items().values())), + default="library", + help="Package type, default is set to `library`", +) +@click.option( + "--undo", + is_flag=True, + help="Undo a remove, putting a version back into the registry", +) +def package_unpublish_cmd(package, type, undo): # pylint: disable=redefined-builtin + spec = PackageSpec(package) + response = RegistryClient().unpublish_package( + owner=spec.owner or AccountClient().get_logged_username(), + type=type, + name=spec.name, + version=str(spec.requirements), + undo=undo, + ) + click.secho(response.get("message"), fg="green") diff --git a/platformio/package/commands/update.py b/platformio/package/commands/update.py new file mode 100644 index 00000000..a520b7b3 --- /dev/null +++ b/platformio/package/commands/update.py @@ -0,0 +1,252 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import os + +import click + +from platformio import fs +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.manager.tool import ToolPackageManager +from platformio.package.meta import PackageSpec +from platformio.project.config import ProjectConfig +from platformio.project.savedeps import pkg_to_save_spec, save_project_dependencies + + +@click.command( + "update", short_help="Update the project dependencies or custom packages" +) +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option("-e", "--environment", "environments", multiple=True) +@click.option("-p", "--platform", "platforms", metavar="SPECIFICATION", multiple=True) +@click.option("-t", "--tool", "tools", metavar="SPECIFICATION", multiple=True) +@click.option("-l", "--library", "libraries", metavar="SPECIFICATION", multiple=True) +@click.option( + "--no-save", + is_flag=True, + help="Prevent saving specified packages to `platformio.ini`", +) +@click.option("--skip-dependencies", is_flag=True, help="Skip package dependencies") +@click.option("-g", "--global", is_flag=True, help="Update global packages") +@click.option( + "--storage-dir", + default=None, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), + help="Custom Package Manager storage for global packages", +) +@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting") +def package_update_cmd(**options): + if options.get("global"): + update_global_dependencies(options) + else: + update_project_dependencies(options) + + +def update_global_dependencies(options): + pm = PlatformPackageManager(options.get("storage_dir")) + tm = ToolPackageManager(options.get("storage_dir")) + lm = LibraryPackageManager(options.get("storage_dir")) + for obj in (pm, tm, lm): + obj.set_log_level(logging.WARN if options.get("silent") else logging.DEBUG) + for spec in options.get("platforms"): + pm.update( + from_spec=spec, + to_spec=spec, + skip_dependencies=options.get("skip_dependencies"), + ) + for spec in options.get("tools"): + tm.update( + from_spec=spec, + to_spec=spec, + skip_dependencies=options.get("skip_dependencies"), + ) + for spec in options.get("libraries", []): + lm.update( + from_spec=spec, + to_spec=spec, + skip_dependencies=options.get("skip_dependencies"), + ) + + +def update_project_dependencies(options): + environments = options["environments"] + with fs.cd(options["project_dir"]): + config = ProjectConfig.get_instance() + config.validate(environments) + for env in config.envs(): + if environments and env not in environments: + continue + if not options["silent"]: + click.echo( + "Resolving %s environment packages..." 
% click.style(env, fg="cyan") + ) + already_up_to_date = not update_project_env_dependencies(env, options) + if not options["silent"] and already_up_to_date: + click.secho("Already up-to-date.", fg="green") + + +def update_project_env_dependencies(project_env, options=None): + options = options or {} + updated_conds = [] + # custom platforms + if options.get("platforms"): + updated_conds.append(_update_project_env_custom_platforms(project_env, options)) + # custom tools + if options.get("tools"): + updated_conds.append(_update_project_env_custom_tools(project_env, options)) + # custom ibraries + if options.get("libraries"): + updated_conds.append(_update_project_env_custom_libraries(project_env, options)) + # declared dependencies + if not updated_conds: + updated_conds = [ + _update_project_env_platform(project_env, options), + _update_project_env_libraries(project_env, options), + ] + return any(updated_conds) + + +def _update_project_env_platform(project_env, options): + config = ProjectConfig.get_instance() + pm = PlatformPackageManager() + if options.get("silent"): + pm.set_log_level(logging.WARN) + spec = config.get(f"env:{project_env}", "platform") + if not spec: + return None + cur_pkg = pm.get_package(spec) + if not cur_pkg: + return None + new_pkg = PlatformPackageManager().update( + cur_pkg, + to_spec=spec, + project_env=project_env, + skip_dependencies=options.get("skip_dependencies"), + ) + return cur_pkg != new_pkg + + +def _update_project_env_custom_platforms(project_env, options): + already_up_to_date = True + pm = PlatformPackageManager() + if not options.get("silent"): + pm.set_log_level(logging.DEBUG) + for spec in options.get("platforms"): + cur_pkg = pm.get_package(spec) + new_pkg = pm.update( + cur_pkg, + to_spec=spec, + project_env=project_env, + skip_dependencies=options.get("skip_dependencies"), + ) + if cur_pkg != new_pkg: + already_up_to_date = False + return not already_up_to_date + + +def _update_project_env_custom_tools(project_env, options): + already_up_to_date = True + tm = ToolPackageManager() + if not options.get("silent"): + tm.set_log_level(logging.DEBUG) + + specs_to_save = [] + for tool in options.get("tools"): + spec = PackageSpec(tool) + cur_pkg = tm.get_package(spec) + new_pkg = tm.update( + cur_pkg, + to_spec=spec, + skip_dependencies=options.get("skip_dependencies"), + ) + if cur_pkg != new_pkg: + already_up_to_date = False + specs_to_save.append(pkg_to_save_spec(new_pkg, spec)) + + if not options.get("no_save") and specs_to_save: + save_project_dependencies( + os.getcwd(), + specs_to_save, + scope="platform_packages", + action="add", + environments=[project_env], + ) + + return not already_up_to_date + + +def _update_project_env_libraries(project_env, options): + already_up_to_date = True + config = ProjectConfig.get_instance() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), project_env) + ) + if options.get("silent"): + lm.set_log_level(logging.WARN) + for library in config.get(f"env:{project_env}", "lib_deps"): + spec = PackageSpec(library) + # skip built-in dependencies + if not spec.external and not spec.owner: + continue + cur_pkg = lm.get_package(spec) + if cur_pkg: + new_pkg = lm.update( + cur_pkg, + to_spec=spec, + skip_dependencies=options.get("skip_dependencies"), + ) + if cur_pkg != new_pkg: + already_up_to_date = False + return not already_up_to_date + + +def _update_project_env_custom_libraries(project_env, options): + already_up_to_date = True + config = ProjectConfig.get_instance() + lm = 
LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), project_env) + ) + if not options.get("silent"): + lm.set_log_level(logging.DEBUG) + + specs_to_save = [] + for library in options.get("libraries") or []: + spec = PackageSpec(library) + cur_pkg = lm.get_package(spec) + new_pkg = lm.update( + cur_pkg, + to_spec=spec, + skip_dependencies=options.get("skip_dependencies"), + ) + if cur_pkg != new_pkg: + already_up_to_date = False + specs_to_save.append(pkg_to_save_spec(new_pkg, spec)) + + if not options.get("no_save") and specs_to_save: + save_project_dependencies( + os.getcwd(), + specs_to_save, + scope="lib_deps", + action="add", + environments=[project_env], + ) + + return not already_up_to_date diff --git a/platformio/package/exception.py b/platformio/package/exception.py index 5d63649e..580137a0 100644 --- a/platformio/package/exception.py +++ b/platformio/package/exception.py @@ -34,7 +34,7 @@ class ManifestParserError(ManifestException): class ManifestValidationError(ManifestException): def __init__(self, messages, data, valid_data): - super(ManifestValidationError, self).__init__() + super().__init__() self.messages = messages self.data = data self.valid_data = valid_data diff --git a/platformio/package/lockfile.py b/platformio/package/lockfile.py index a24f59e7..296036aa 100644 --- a/platformio/package/lockfile.py +++ b/platformio/package/lockfile.py @@ -69,8 +69,11 @@ class LockFile(object): if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL: fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB) elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT: - msvcrt.locking(self._fp.fileno(), msvcrt.LK_NBLCK, 1) - except IOError: + msvcrt.locking( # pylint: disable=used-before-assignment + self._fp.fileno(), msvcrt.LK_NBLCK, 1 + ) + except (BlockingIOError, IOError): + self._fp.close() self._fp = None raise LockFileExists return True diff --git a/platformio/package/manager/_download.py b/platformio/package/manager/_download.py index f48be79b..e408908a 100644 --- a/platformio/package/manager/_download.py +++ b/platformio/package/manager/_download.py @@ -13,11 +13,14 @@ # limitations under the License. 
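# --- Editor's illustrative sketch (not part of this patch) -------------------
# The hunks above and below replace the old ``silent=`` keyword arguments with
# standard-library logging: each manager owns a logger and callers tune its
# level instead. A minimal usage sketch, assuming the 6.0 APIs shown in this
# diff (the package spec is only an example):
import logging

from platformio.package.manager.tool import ToolPackageManager

tm = ToolPackageManager()  # defaults to the global "packages_dir" storage
tm.set_log_level(logging.WARN)  # hide INFO-level progress, keep warnings/errors
# raises UnknownPackageError if the tool is not installed yet
tm.update("platformio/tool-scons", to_spec="platformio/tool-scons")
# -----------------------------------------------------------------------------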
import hashlib +import logging import os import tempfile import time -from platformio import app, compat +import click + +from platformio import app, compat, util from platformio.package.download import FileDownloader from platformio.package.lockfile import LockFile @@ -40,7 +43,8 @@ class PackageManagerDownloadMixin(object): with app.State(self.get_download_usagedb_path(), lock=True) as state: state[os.path.basename(path)] = int(time.time() if not utime else utime) - def cleanup_expired_downloads(self): + @util.memoized(DOWNLOAD_CACHE_EXPIRE) + def cleanup_expired_downloads(self, _=None): with app.State(self.get_download_usagedb_path(), lock=True) as state: # remove outdated for fname in list(state.keys()): @@ -51,7 +55,8 @@ class PackageManagerDownloadMixin(object): if os.path.isfile(dl_path): os.remove(dl_path) - def download(self, url, checksum=None, silent=False): + def download(self, url, checksum=None): + silent = not self.log.isEnabledFor(logging.INFO) dl_path = self.compute_download_path(url, checksum or "") if os.path.isfile(dl_path): self.set_download_utime(dl_path) @@ -75,10 +80,11 @@ class PackageManagerDownloadMixin(object): except IOError: raise_error = True if raise_error: - self.print_message( - "Error: Please read https://bit.ly/package-manager-ioerror", - fg="red", - err=True, + self.log.error( + click.style( + "Error: Please read https://bit.ly/package-manager-ioerror", + fg="red", + ) ) raise e if checksum: diff --git a/platformio/package/manager/_install.py b/platformio/package/manager/_install.py index 11684563..8129d9b0 100644 --- a/platformio/package/manager/_install.py +++ b/platformio/package/manager/_install.py @@ -20,7 +20,7 @@ import tempfile import click from platformio import app, compat, fs, util -from platformio.package.exception import PackageException +from platformio.package.exception import PackageException, UnknownPackageError from platformio.package.meta import PackageItem from platformio.package.unpack import FileUnpacker from platformio.package.vcsclient import VCSClientFactory @@ -42,23 +42,20 @@ class PackageManagerInstallMixin(object): with FileUnpacker(src) as fu: return fu.unpack(dst, with_progress=False) - def install(self, spec, silent=False, skip_dependencies=False, force=False): + def install(self, spec, skip_dependencies=False, force=False): try: self.lock() - pkg = self._install( - spec, silent=silent, skip_dependencies=skip_dependencies, force=force - ) + pkg = self._install(spec, skip_dependencies=skip_dependencies, force=force) self.memcache_reset() self.cleanup_expired_downloads() return pkg finally: self.unlock() - def _install( # pylint: disable=too-many-arguments + def _install( self, spec, - search_filters=None, - silent=False, + search_qualifiers=None, skip_dependencies=False, force=False, ): @@ -67,7 +64,7 @@ class PackageManagerInstallMixin(object): # avoid circle dependencies if not self._INSTALL_HISTORY: self._INSTALL_HISTORY = {} - if spec in self._INSTALL_HISTORY: + if not force and spec in self._INSTALL_HISTORY: return self._INSTALL_HISTORY[spec] # check if package is already installed @@ -75,28 +72,32 @@ class PackageManagerInstallMixin(object): # if a forced installation if pkg and force: - self.uninstall(pkg, silent=silent) + self.uninstall(pkg) pkg = None if pkg: - if not silent: - self.print_message( - "{name} @ {version} is already installed".format( + # avoid RecursionError for circular_dependencies + self._INSTALL_HISTORY[spec] = pkg + + self.log.debug( + click.style( + "{name}@{version} is already 
installed".format( **pkg.metadata.as_dict() ), fg="yellow", ) + ) + # ensure package dependencies are installed + if not skip_dependencies: + self.install_dependencies(pkg, print_header=False) return pkg - if not silent: - self.print_message( - "Installing %s" % click.style(spec.humanize(), fg="cyan") - ) + self.log.info("Installing %s" % click.style(spec.humanize(), fg="cyan")) if spec.external: - pkg = self.install_from_url(spec.url, spec, silent=silent) + pkg = self.install_from_uri(spec.uri, spec) else: - pkg = self.install_from_registry(spec, search_filters, silent=silent) + pkg = self.install_from_registry(spec, search_qualifiers) if not pkg or not pkg.metadata: raise PackageException( @@ -104,41 +105,75 @@ class PackageManagerInstallMixin(object): % (spec.humanize(), util.get_systype()) ) - if not silent: - self.print_message( - "{name} @ {version} has been installed!".format( - **pkg.metadata.as_dict() - ), + self.call_pkg_script(pkg, "postinstall") + + self.log.info( + click.style( + "{name}@{version} has been installed!".format(**pkg.metadata.as_dict()), fg="green", ) + ) self.memcache_reset() - if not skip_dependencies: - self.install_dependencies(pkg, silent) + # avoid RecursionError for circular_dependencies self._INSTALL_HISTORY[spec] = pkg + + if not skip_dependencies: + self.install_dependencies(pkg) + return pkg - def install_dependencies(self, pkg, silent=False): - pass + def install_dependencies(self, pkg, print_header=True): + assert isinstance(pkg, PackageItem) + dependencies = self.get_pkg_dependencies(pkg) + if not dependencies: + return + if print_header: + self.log.info("Resolving dependencies...") + for dependency in dependencies: + try: + self.install_dependency(dependency) + except UnknownPackageError: + if dependency.get("owner"): + self.log.warning( + click.style( + "Warning! 
Could not install dependency %s for package '%s'" + % (dependency, pkg.metadata.name), + fg="yellow", + ) + ) - def install_from_url(self, url, spec, checksum=None, silent=False): + def install_dependency(self, dependency): + spec = self.dependency_to_spec(dependency) + search_qualifiers = { + key: value + for key, value in dependency.items() + if key in ("authors", "platforms", "frameworks") + } + return self._install(spec, search_qualifiers=search_qualifiers or None) + + def install_from_uri(self, uri, spec, checksum=None): spec = self.ensure_spec(spec) + + if spec.symlink: + return self.install_symlink(spec) + tmp_dir = tempfile.mkdtemp(prefix="pkg-installing-", dir=self.get_tmp_dir()) vcs = None try: - if url.startswith("file://"): - _url = url[7:] - if os.path.isfile(_url): - self.unpack(_url, tmp_dir) + if uri.startswith("file://"): + _uri = uri[7:] + if os.path.isfile(_uri): + self.unpack(_uri, tmp_dir) else: fs.rmtree(tmp_dir) - shutil.copytree(_url, tmp_dir, symlinks=True) - elif url.startswith(("http://", "https://")): - dl_path = self.download(url, checksum, silent=silent) + shutil.copytree(_uri, tmp_dir, symlinks=True) + elif uri.startswith(("http://", "https://")): + dl_path = self.download(uri, checksum) assert os.path.isfile(dl_path) self.unpack(dl_path, tmp_dir) else: - vcs = VCSClientFactory.new(tmp_dir, url) + vcs = VCSClientFactory.new(tmp_dir, uri) assert vcs.export() root_dir = self.find_pkg_root(tmp_dir, spec) @@ -185,7 +220,7 @@ class PackageManagerInstallMixin(object): ) elif dst_pkg.metadata: if dst_pkg.metadata.spec.external: - if dst_pkg.metadata.spec.url != tmp_pkg.metadata.spec.url: + if dst_pkg.metadata.spec.uri != tmp_pkg.metadata.spec.uri: action = "detach-existing" elif ( dst_pkg.metadata.version != tmp_pkg.metadata.version @@ -206,11 +241,11 @@ class PackageManagerInstallMixin(object): tmp_pkg.get_safe_dirname(), dst_pkg.metadata.version, ) - if dst_pkg.metadata.spec.url: + if dst_pkg.metadata.spec.uri: target_dirname = "%s@src-%s" % ( tmp_pkg.get_safe_dirname(), hashlib.md5( - compat.hashlib_encode_data(dst_pkg.metadata.spec.url) + compat.hashlib_encode_data(dst_pkg.metadata.spec.uri) ).hexdigest(), ) # move existing into the new place @@ -231,7 +266,7 @@ class PackageManagerInstallMixin(object): target_dirname = "%s@src-%s" % ( tmp_pkg.get_safe_dirname(), hashlib.md5( - compat.hashlib_encode_data(tmp_pkg.metadata.spec.url) + compat.hashlib_encode_data(tmp_pkg.metadata.spec.uri) ).hexdigest(), ) pkg_dir = os.path.join(self.package_dir, target_dirname) diff --git a/platformio/package/manager/_legacy.py b/platformio/package/manager/_legacy.py index 5c35ebeb..978efc9c 100644 --- a/platformio/package/manager/_legacy.py +++ b/platformio/package/manager/_legacy.py @@ -33,7 +33,7 @@ class PackageManagerLegacyMixin(object): src_manifest = fs.load_json(src_manifest_path) return PackageSpec( name=src_manifest.get("name"), - url=src_manifest.get("url"), + uri=src_manifest.get("url"), requirements=src_manifest.get("requirements"), ) @@ -51,7 +51,7 @@ class PackageManagerLegacyMixin(object): if not manifest.get(key): manifest[key] = str(getattr(pkg.metadata, key)) if pkg.metadata and pkg.metadata.spec and pkg.metadata.spec.external: - manifest["__src_url"] = pkg.metadata.spec.url + manifest["__src_url"] = pkg.metadata.spec.uri manifest["version"] = str(pkg.metadata.version) if pkg.metadata and pkg.metadata.spec.owner: manifest["ownername"] = pkg.metadata.spec.owner diff --git a/platformio/package/manager/_registry.py b/platformio/package/manager/_registry.py index 
e488b5b3..b45a17f2 100644 --- a/platformio/package/manager/_registry.py +++ b/platformio/package/manager/_registry.py @@ -12,21 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json import time +from urllib.parse import urlparse import click +from platformio import __registry_mirror_hosts__ +from platformio.cache import ContentCache from platformio.clients.http import HTTPClient from platformio.clients.registry import RegistryClient from platformio.package.exception import UnknownPackageError from platformio.package.meta import PackageSpec from platformio.package.version import cast_version_to_semver -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - class RegistryFileMirrorIterator(object): @@ -41,56 +40,84 @@ class RegistryFileMirrorIterator(object): def __iter__(self): # pylint: disable=non-iterator-returned return self - def next(self): - """For Python 2 compatibility""" - return self.__next__() - def __next__(self): - http = self.get_http_client() - response = http.send_request( - "head", - self._url_parts.path, - allow_redirects=False, - params=dict(bypass=",".join(self._visited_mirrors)) - if self._visited_mirrors - else None, - x_with_authorization=RegistryClient.allowed_private_packages(), - ) - stop_conditions = [ - response.status_code not in (302, 307), - not response.headers.get("Location"), - not response.headers.get("X-PIO-Mirror"), - response.headers.get("X-PIO-Mirror") in self._visited_mirrors, - ] - if any(stop_conditions): - raise StopIteration - self._visited_mirrors.append(response.headers.get("X-PIO-Mirror")) - return ( - response.headers.get("Location"), - response.headers.get("X-PIO-Content-SHA256"), + cache_key = ContentCache.key_from_args( + "head", self.download_url, self._visited_mirrors ) + with ContentCache("http") as cc: + result = cc.get(cache_key) + if result is not None: + try: + headers = json.loads(result) + return ( + headers["Location"], + headers["X-PIO-Content-SHA256"], + ) + except (ValueError, KeyError): + pass + + http = self.get_http_client() + response = http.send_request( + "head", + self._url_parts.path, + allow_redirects=False, + params=dict(bypass=",".join(self._visited_mirrors)) + if self._visited_mirrors + else None, + x_with_authorization=RegistryClient.allowed_private_packages(), + ) + stop_conditions = [ + response.status_code not in (302, 307), + not response.headers.get("Location"), + not response.headers.get("X-PIO-Mirror"), + response.headers.get("X-PIO-Mirror") in self._visited_mirrors, + ] + if any(stop_conditions): + raise StopIteration + self._visited_mirrors.append(response.headers.get("X-PIO-Mirror")) + cc.set( + cache_key, + json.dumps( + { + "Location": response.headers.get("Location"), + "X-PIO-Content-SHA256": response.headers.get( + "X-PIO-Content-SHA256" + ), + } + ), + "1h", + ) + return ( + response.headers.get("Location"), + response.headers.get("X-PIO-Content-SHA256"), + ) def get_http_client(self): if self._mirror not in RegistryFileMirrorIterator.HTTP_CLIENT_INSTANCES: + endpoints = [self._mirror] + for host in __registry_mirror_hosts__: + endpoint = f"https://dl.{host}" + if endpoint not in endpoints: + endpoints.append(endpoint) RegistryFileMirrorIterator.HTTP_CLIENT_INSTANCES[self._mirror] = HTTPClient( - self._mirror + endpoints ) return RegistryFileMirrorIterator.HTTP_CLIENT_INSTANCES[self._mirror] -class PackageManageRegistryMixin(object): - def install_from_registry(self, spec, 
search_filters=None, silent=False): - if spec.owner and spec.name and not search_filters: +class PackageManagerRegistryMixin(object): + def install_from_registry(self, spec, search_qualifiers=None): + if spec.owner and spec.name and not search_qualifiers: package = self.fetch_registry_package(spec) if not package: raise UnknownPackageError(spec.humanize()) version = self.pick_best_registry_version(package["versions"], spec) else: - packages = self.search_registry_packages(spec, search_filters) + packages = self.search_registry_packages(spec, search_qualifiers) if not packages: raise UnknownPackageError(spec.humanize()) - if len(packages) > 1 and not silent: - self.print_multi_package_issue(packages, spec) + if len(packages) > 1: + self.print_multi_package_issue(self.log.warning, packages, spec) package, version = self.find_best_registry_version(packages, spec) if not package or not version: @@ -102,7 +129,7 @@ class PackageManageRegistryMixin(object): for url, checksum in RegistryFileMirrorIterator(pkgfile["download_url"]): try: - return self.install_from_url( + return self.install_from_uri( url, PackageSpec( owner=package["owner"]["username"], @@ -110,11 +137,14 @@ class PackageManageRegistryMixin(object): name=package["name"], ), checksum or pkgfile["checksum"]["sha256"], - silent=silent, ) except Exception as e: # pylint: disable=broad-except - self.print_message("Warning! Package Mirror: %s" % e, fg="yellow") - self.print_message("Looking for another mirror...", fg="yellow") + self.log.warning( + click.style("Warning! Package Mirror: %s" % e, fg="yellow") + ) + self.log.warning( + click.style("Looking for another mirror...", fg="yellow") + ) return None @@ -123,17 +153,17 @@ class PackageManageRegistryMixin(object): self._registry_client = RegistryClient() return self._registry_client - def search_registry_packages(self, spec, filters=None): + def search_registry_packages(self, spec, qualifiers=None): assert isinstance(spec, PackageSpec) - filters = filters or {} + qualifiers = qualifiers or {} if spec.id: - filters["ids"] = str(spec.id) + qualifiers["ids"] = str(spec.id) else: - filters["types"] = self.pkg_type - filters["names"] = spec.name.lower() + qualifiers["types"] = self.pkg_type + qualifiers["names"] = spec.name.lower() if spec.owner: - filters["owners"] = spec.owner.lower() - return self.get_registry_client_instance().list_packages(filters=filters)[ + qualifiers["owners"] = spec.owner.lower() + return self.get_registry_client_instance().list_packages(qualifiers=qualifiers)[ "items" ] @@ -153,36 +183,42 @@ class PackageManageRegistryMixin(object): raise UnknownPackageError(spec.humanize()) return result - def reveal_registry_package_id(self, spec, silent=False): + def reveal_registry_package_id(self, spec): spec = self.ensure_spec(spec) if spec.id: return spec.id packages = self.search_registry_packages(spec) if not packages: raise UnknownPackageError(spec.humanize()) - if len(packages) > 1 and not silent: - self.print_multi_package_issue(packages, spec) - click.echo("") + if len(packages) > 1: + self.print_multi_package_issue(self.log.warning, packages, spec) + self.log.info("") return packages[0]["id"] - def print_multi_package_issue(self, packages, spec): - self.print_message( - "Warning! More than one package has been found by ", fg="yellow", nl=False + @staticmethod + def print_multi_package_issue(print_func, packages, spec): + print_func( + click.style( + "Warning! 
More than one package has been found by ", fg="yellow" + ) + + click.style(spec.humanize(), fg="cyan") + + click.style(" requirements:", fg="yellow") ) - click.secho(spec.humanize(), fg="cyan", nl=False) - click.secho(" requirements:", fg="yellow") + for item in packages: - click.echo( - " - {owner}/{name} @ {version}".format( + print_func( + " - {owner}/{name}@{version}".format( owner=click.style(item["owner"]["username"], fg="cyan"), name=item["name"], version=item["version"]["name"], ) ) - self.print_message( - "Please specify detailed REQUIREMENTS using package owner and version " - "(shown above) to avoid name conflicts", - fg="yellow", + print_func( + click.style( + "Please specify detailed REQUIREMENTS using package owner and version " + "(shown above) to avoid name conflicts", + fg="yellow", + ) ) def find_best_registry_version(self, packages, spec): diff --git a/platformio/package/manager/_symlink.py b/platformio/package/manager/_symlink.py new file mode 100644 index 00000000..8c5eae38 --- /dev/null +++ b/platformio/package/manager/_symlink.py @@ -0,0 +1,71 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import os + +from platformio import fs +from platformio.package.exception import PackageException +from platformio.package.meta import PackageItem, PackageSpec + + +class PackageManagerSymlinkMixin(object): + @staticmethod + def is_symlink(path): + return path and path.endswith(".pio-link") and os.path.isfile(path) + + @classmethod + def resolve_symlink(cls, path): + assert cls.is_symlink(path) + data = fs.load_json(path) + spec = PackageSpec(**data["spec"]) + assert spec.symlink + pkg_dir = spec.uri[10:] + if not os.path.isabs(pkg_dir): + pkg_dir = os.path.normpath(os.path.join(data["cwd"], pkg_dir)) + return (pkg_dir if os.path.isdir(pkg_dir) else None, spec) + + def get_symlinked_package(self, path): + pkg_dir, spec = self.resolve_symlink(path) + if not pkg_dir: + return None + pkg = PackageItem(os.path.realpath(pkg_dir)) + if not pkg.metadata: + pkg.metadata = self.build_metadata(pkg.path, spec) + return pkg + + def install_symlink(self, spec): + assert spec.symlink + pkg_dir = spec.uri[10:] + if not os.path.isdir(pkg_dir): + raise PackageException( + f"Can not create a symbolic link for `{pkg_dir}`, not a directory" + ) + link_path = os.path.join( + self.package_dir, + "%s.pio-link" % (spec.name or os.path.basename(os.path.abspath(pkg_dir))), + ) + with open(link_path, mode="w", encoding="utf-8") as fp: + json.dump(dict(cwd=os.getcwd(), spec=spec.as_dict()), fp) + return self.get_symlinked_package(link_path) + + def uninstall_symlink(self, spec): + assert spec.symlink + for name in os.listdir(self.package_dir): + path = os.path.join(self.package_dir, name) + if not self.is_symlink(path): + continue + pkg = self.get_symlinked_package(path) + if pkg.metadata.spec.uri == spec.uri: + os.remove(path) diff --git a/platformio/package/manager/_uninstall.py b/platformio/package/manager/_uninstall.py index 68f7a300..9c6b5772 
100644 --- a/platformio/package/manager/_uninstall.py +++ b/platformio/package/manager/_uninstall.py @@ -19,33 +19,36 @@ import click from platformio import fs from platformio.package.exception import UnknownPackageError -from platformio.package.meta import PackageSpec +from platformio.package.meta import PackageItem, PackageSpec class PackageManagerUninstallMixin(object): - def uninstall(self, spec, silent=False, skip_dependencies=False): + def uninstall(self, spec, skip_dependencies=False): try: self.lock() - return self._uninstall(spec, silent, skip_dependencies) + return self._uninstall(spec, skip_dependencies) finally: self.unlock() - def _uninstall(self, spec, silent=False, skip_dependencies=False): + def _uninstall(self, spec, skip_dependencies=False): pkg = self.get_package(spec) if not pkg or not pkg.metadata: raise UnknownPackageError(spec) - if not silent: - self.print_message( - "Removing %s @ %s" - % (click.style(pkg.metadata.name, fg="cyan"), pkg.metadata.version), - ) + self.log.info( + "Removing %s @ %s" + % (click.style(pkg.metadata.name, fg="cyan"), pkg.metadata.version) + ) + + self.call_pkg_script(pkg, "preuninstall") # firstly, remove dependencies if not skip_dependencies: - self.uninstall_dependencies(pkg, silent) + self.uninstall_dependencies(pkg) - if os.path.islink(pkg.path): + if pkg.metadata.spec.symlink: + self.uninstall_symlink(pkg.metadata.spec) + elif os.path.islink(pkg.path): os.unlink(pkg.path) else: fs.rmtree(pkg.path) @@ -66,13 +69,23 @@ class PackageManagerUninstallMixin(object): ) self.memcache_reset() - if not silent: - self.print_message( - "{name} @ {version} has been removed!".format(**pkg.metadata.as_dict()), + self.log.info( + click.style( + "{name}@{version} has been removed!".format(**pkg.metadata.as_dict()), fg="green", ) + ) return pkg - def uninstall_dependencies(self, pkg, silent=False): - pass + def uninstall_dependencies(self, pkg): + assert isinstance(pkg, PackageItem) + dependencies = self.get_pkg_dependencies(pkg) + if not dependencies: + return + self.log.info("Removing dependencies...") + for dependency in dependencies: + pkg = self.get_package(self.dependency_to_spec(dependency)) + if not pkg: + continue + self._uninstall(pkg) diff --git a/platformio/package/manager/_update.py b/platformio/package/manager/_update.py index c81e7186..5d689ba6 100644 --- a/platformio/package/manager/_update.py +++ b/platformio/package/manager/_update.py @@ -16,7 +16,6 @@ import os import click -from platformio.clients.http import ensure_internet_on from platformio.package.exception import UnknownPackageError from platformio.package.meta import PackageItem, PackageOutdatedResult, PackageSpec from platformio.package.vcsclient import VCSBaseException, VCSClientFactory @@ -25,9 +24,11 @@ from platformio.package.vcsclient import VCSBaseException, VCSClientFactory class PackageManagerUpdateMixin(object): def outdated(self, pkg, spec=None): assert isinstance(pkg, PackageItem) - assert not spec or isinstance(spec, PackageSpec) assert pkg.metadata + if spec and not isinstance(spec, PackageSpec): + spec = PackageSpec(spec) + if not os.path.isdir(pkg.path): return PackageOutdatedResult(current=pkg.metadata.version) @@ -66,122 +67,71 @@ class PackageManagerUpdateMixin(object): def _fetch_vcs_latest_version(self, pkg): vcs = None try: - vcs = VCSClientFactory.new(pkg.path, pkg.metadata.spec.url, silent=True) + vcs = VCSClientFactory.new(pkg.path, pkg.metadata.spec.uri, silent=True) except VCSBaseException: return None if not vcs.can_be_updated: return None + + 
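# --- Editor's illustrative sketch (not part of this patch) -------------------
# PackageManagerSymlinkMixin above materializes a "symlink://" requirement as a
# small "<name>.pio-link" JSON file instead of copying the sources; the local
# path used here is invented for the example:
import json

from platformio.package.meta import PackageSpec

spec = PackageSpec("symlink:///home/user/projects/MyLib")
assert spec.symlink and spec.external  # treated as an external, linked package
# roughly the payload that install_symlink() writes into "MyLib.pio-link"
print(json.dumps(dict(cwd="/home/user/projects", spec=spec.as_dict()), indent=2))
# -----------------------------------------------------------------------------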
vcs_revision = vcs.get_latest_revision() + if not vcs_revision: + return None + return str( self.build_metadata( - pkg.path, pkg.metadata.spec, vcs_revision=vcs.get_latest_revision() + pkg.path, pkg.metadata.spec, vcs_revision=vcs_revision ).version ) - def update( # pylint: disable=too-many-arguments + def update( self, from_spec, to_spec=None, - only_check=False, - silent=False, - show_incompatible=True, + skip_dependencies=False, ): pkg = self.get_package(from_spec) if not pkg or not pkg.metadata: raise UnknownPackageError(from_spec) - if not silent: - click.echo( - "{} {:<45} {:<35}".format( - "Checking" if only_check else "Updating", - click.style(pkg.metadata.spec.humanize(), fg="cyan"), - "%s @ %s" % (pkg.metadata.version, to_spec.requirements) - if to_spec and to_spec.requirements - else str(pkg.metadata.version), - ), - nl=False, - ) - if not ensure_internet_on(): - if not silent: - click.echo("[%s]" % (click.style("Off-line", fg="yellow"))) - return pkg - outdated = self.outdated(pkg, to_spec) - if not silent: - self.print_outdated_state(outdated, only_check, show_incompatible) - - if only_check or not outdated.is_outdated(allow_incompatible=False): + if not outdated.is_outdated(allow_incompatible=False): + self.log.debug( + click.style( + "{name}@{version} is already up-to-date".format( + **pkg.metadata.as_dict() + ), + fg="yellow", + ) + ) return pkg + self.log.info( + "Updating %s @ %s" + % (click.style(pkg.metadata.name, fg="cyan"), pkg.metadata.version) + ) try: self.lock() - return self._update(pkg, outdated, silent=silent) + return self._update(pkg, outdated, skip_dependencies) finally: self.unlock() - @staticmethod - def print_outdated_state(outdated, only_check, show_incompatible): - if outdated.detached: - return click.echo("[%s]" % (click.style("Detached", fg="yellow"))) - - if ( - not outdated.latest - or outdated.current == outdated.latest - or (not show_incompatible and outdated.current == outdated.wanted) - ): - return click.echo("[%s]" % (click.style("Up-to-date", fg="green"))) - - if outdated.wanted and outdated.current == outdated.wanted: - return click.echo( - "[%s]" % (click.style("Incompatible %s" % outdated.latest, fg="yellow")) - ) - - if only_check: - return click.echo( - "[%s]" - % ( - click.style( - "Outdated %s" % str(outdated.wanted or outdated.latest), - fg="red", - ) - ) - ) - - return click.echo( - "[%s]" - % ( - click.style( - "Updating to %s" % str(outdated.wanted or outdated.latest), - fg="green", - ) - ) - ) - - def _update(self, pkg, outdated, silent=False): + def _update(self, pkg, outdated, skip_dependencies=False): if pkg.metadata.spec.external: - vcs = VCSClientFactory.new(pkg.path, pkg.metadata.spec.url) + vcs = VCSClientFactory.new(pkg.path, pkg.metadata.spec.uri) assert vcs.update() pkg.metadata.version = self._fetch_vcs_latest_version(pkg) pkg.dump_meta() return pkg - new_pkg = self.install( + # uninstall existing version + self.uninstall(pkg, skip_dependencies=True) + + return self.install( PackageSpec( id=pkg.metadata.spec.id, owner=pkg.metadata.spec.owner, name=pkg.metadata.spec.name, requirements=outdated.wanted or outdated.latest, ), - silent=silent, + skip_dependencies=skip_dependencies, ) - if new_pkg: - old_pkg = self.get_package( - PackageSpec( - id=pkg.metadata.spec.id, - owner=pkg.metadata.spec.owner, - name=pkg.metadata.name, - requirements=pkg.metadata.version, - ) - ) - if old_pkg: - self.uninstall(old_pkg, silent=silent, skip_dependencies=True) - return new_pkg diff --git a/platformio/package/manager/base.py 
b/platformio/package/manager/base.py index 16409d7c..07ca2f5b 100644 --- a/platformio/package/manager/base.py +++ b/platformio/package/manager/base.py @@ -12,13 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import os +import subprocess from datetime import datetime import click import semantic_version -from platformio import util +from platformio import fs, util from platformio.commands import PlatformioCLI from platformio.compat import ci_strings_are_equal from platformio.package.exception import ManifestException, MissingPackageManifestError @@ -26,7 +28,8 @@ from platformio.package.lockfile import LockFile from platformio.package.manager._download import PackageManagerDownloadMixin from platformio.package.manager._install import PackageManagerInstallMixin from platformio.package.manager._legacy import PackageManagerLegacyMixin -from platformio.package.manager._registry import PackageManageRegistryMixin +from platformio.package.manager._registry import PackageManagerRegistryMixin +from platformio.package.manager._symlink import PackageManagerSymlinkMixin from platformio.package.manager._uninstall import PackageManagerUninstallMixin from platformio.package.manager._update import PackageManagerUpdateMixin from platformio.package.manifest.parser import ManifestParserFactory @@ -36,12 +39,19 @@ from platformio.package.meta import ( PackageSpec, PackageType, ) +from platformio.proc import get_pythonexe_path from platformio.project.helpers import get_project_cache_dir -class BasePackageManager( # pylint: disable=too-many-public-methods +class ClickLoggingHandler(logging.Handler): + def emit(self, record): + click.echo(self.format(record)) + + +class BasePackageManager( # pylint: disable=too-many-public-methods,too-many-instance-attributes PackageManagerDownloadMixin, - PackageManageRegistryMixin, + PackageManagerRegistryMixin, + PackageManagerSymlinkMixin, PackageManagerInstallMixin, PackageManagerUninstallMixin, PackageManagerUpdateMixin, @@ -52,13 +62,33 @@ class BasePackageManager( # pylint: disable=too-many-public-methods def __init__(self, pkg_type, package_dir): self.pkg_type = pkg_type self.package_dir = package_dir - self._MEMORY_CACHE = {} + self.log = self._setup_logger() + self._MEMORY_CACHE = {} self._lockfile = None self._download_dir = None self._tmp_dir = None self._registry_client = None + def __repr__(self): + return ( + f"{self.__class__.__name__} " + ) + + def _setup_logger(self): + logger = logging.getLogger(str(self.__class__.__name__).replace("Package", " ")) + logger.setLevel(logging.INFO) + formatter = logging.Formatter("%(name)s: %(message)s") + sh = ClickLoggingHandler() + sh.setFormatter(formatter) + logger.handlers.clear() + logger.addHandler(sh) + return logger + + def set_log_level(self, level): + self.log.setLevel(level) + def lock(self): if self._lockfile: return @@ -105,12 +135,6 @@ class BasePackageManager( # pylint: disable=too-many-public-methods def manifest_names(self): raise NotImplementedError - def print_message(self, message, **kwargs): - click.echo( - "%s: " % str(self.__class__.__name__).replace("Package", " "), nl=False - ) - click.secho(message, **kwargs) - def get_download_dir(self): if not self._download_dir: self._download_dir = self.ensure_dir_exists( @@ -165,7 +189,7 @@ class BasePackageManager( # pylint: disable=too-many-public-methods return result except ManifestException as e: if not PlatformioCLI.in_silence(): - self.print_message(str(e), fg="yellow") + 
self.log.warning(click.style(str(e), fg="yellow")) raise MissingPackageManifestError(", ".join(self.manifest_names)) @staticmethod @@ -191,7 +215,7 @@ class BasePackageManager( # pylint: disable=too-many-public-methods metadata.version = self.generate_rand_version() return metadata - def get_installed(self): + def get_installed(self): # pylint: disable=too-many-branches if not os.path.isdir(self.package_dir): return [] @@ -203,14 +227,18 @@ class BasePackageManager( # pylint: disable=too-many-public-methods for name in sorted(os.listdir(self.package_dir)): if name.startswith("_tmp_installing"): # legacy tmp folder continue - pkg_dir = os.path.join(self.package_dir, name) - if not os.path.isdir(pkg_dir): + pkg = None + path = os.path.join(self.package_dir, name) + if os.path.isdir(path): + pkg = PackageItem(path) + elif self.is_symlink(path): + pkg = self.get_symlinked_package(path) + if not pkg: continue - pkg = PackageItem(pkg_dir) if not pkg.metadata: try: - spec = self.build_legacy_spec(pkg_dir) - pkg.metadata = self.build_metadata(pkg_dir, spec) + spec = self.build_legacy_spec(pkg.path) + pkg.metadata = self.build_metadata(pkg.path, spec) except MissingPackageManifestError: pass if not pkg.metadata: @@ -252,12 +280,12 @@ class BasePackageManager( # pylint: disable=too-many-public-methods # external "URL" mismatch if spec.external: # local folder mismatch - if os.path.abspath(spec.url) == os.path.abspath(pkg.path) or ( - spec.url.startswith("file://") - and os.path.abspath(pkg.path) == os.path.abspath(spec.url[7:]) + if os.path.abspath(spec.uri) == os.path.abspath(pkg.path) or ( + spec.uri.startswith("file://") + and os.path.abspath(pkg.path) == os.path.abspath(spec.uri[7:]) ): return True - if spec.url != pkg.metadata.spec.url: + if spec.uri != pkg.metadata.spec.uri: return False # "owner" mismatch @@ -271,3 +299,42 @@ class BasePackageManager( # pylint: disable=too-many-public-methods return False return True + + def get_pkg_dependencies(self, pkg): + return self.load_manifest(pkg).get("dependencies") + + @staticmethod + def dependency_to_spec(dependency): + return PackageSpec( + owner=dependency.get("owner"), + name=dependency.get("name"), + requirements=dependency.get("version"), + ) + + def call_pkg_script(self, pkg, event): + manifest = None + try: + manifest = self.load_manifest(pkg) + except MissingPackageManifestError: + pass + scripts = (manifest or {}).get("scripts") + if not scripts or not isinstance(scripts, dict): + return + cmd = scripts.get(event) + if not cmd: + return + shell = False + if not isinstance(cmd, list): + shell = True + cmd = [cmd] + os.environ["PIO_PYTHON_EXE"] = get_pythonexe_path() + with fs.cd(pkg.path): + if os.path.isfile(cmd[0]) and cmd[0].endswith(".py"): + cmd = [os.environ["PIO_PYTHON_EXE"]] + cmd + subprocess.run( + " ".join(cmd) if shell else cmd, + cwd=pkg.path, + shell=shell, + env=os.environ, + check=True, + ) diff --git a/platformio/package/manager/core.py b/platformio/package/manager/core.py index 50b7e34e..d9a05cb5 100644 --- a/platformio/package/manager/core.py +++ b/platformio/package/manager/core.py @@ -38,11 +38,11 @@ def get_installed_core_packages(): return result -def get_core_package_dir(name, auto_install=True): +def get_core_package_dir(name, spec=None, auto_install=True): if name not in __core_packages__: raise exception.PlatformioException("Please upgrade PlatformIO Core") pm = ToolPackageManager() - spec = PackageSpec( + spec = spec or PackageSpec( owner="platformio", name=name, requirements=__core_packages__[name] ) pkg = 
pm.get_package(spec) @@ -55,17 +55,15 @@ def get_core_package_dir(name, auto_install=True): return pm.get_package(spec).path -def update_core_packages(only_check=False, silent=False): +def update_core_packages(): pm = ToolPackageManager() for name, requirements in __core_packages__.items(): spec = PackageSpec(owner="platformio", name=name, requirements=requirements) - pkg = pm.get_package(spec) - if not pkg: - continue - if not silent or pm.outdated(pkg, spec).is_outdated(): - pm.update(pkg, spec, only_check=only_check) - if not only_check: - remove_unnecessary_core_packages() + try: + pm.update(spec, spec) + except UnknownPackageError: + pass + remove_unnecessary_core_packages() return True @@ -152,7 +150,7 @@ def build_contrib_pysite_package(target_dir, with_metadata=True): if "linux" in systype: args.extend(["--no-binary", ":all:"]) try: - subprocess.run(args + get_contrib_pysite_deps(), check=True) + subprocess.run(args + get_contrib_pysite_deps(), check=True, env=os.environ) except subprocess.CalledProcessError as exc: if "linux" in systype: raise UserSideException( @@ -212,15 +210,19 @@ def build_contrib_pysite_package(target_dir, with_metadata=True): def get_contrib_pysite_deps(): - twisted_version = "21.7.0" - result = [ - # twisted[tls], see setup.py for %twisted_version% - "twisted == %s" % twisted_version, - # pyopenssl depends on it, use RUST-less version - "cryptography >= 3.3, < 35.0.0", - "pyopenssl >= 16.0.0, <= 21.0.0", - "service_identity >= 18.1.0, <= 21.1.0", - ] - if "windows" in util.get_systype(): + systype = util.get_systype() + twisted_version = "22.1.0" + if "linux_arm" in systype: + result = [ + # twisted[tls], see setup.py for %twisted_version% + "twisted == %s" % twisted_version, + # pyopenssl depends on it, use RUST-less version + "cryptography >= 3.3, < 35.0.0", + "pyopenssl >= 16.0.0, <= 21.0.0", + "service_identity >= 18.1.0, <= 21.1.0", + ] + else: + result = ["twisted[tls] == %s" % twisted_version] + if "windows" in systype: result.append("pywin32 != 226") return result diff --git a/platformio/package/manager/library.py b/platformio/package/manager/library.py index c3519a20..802e0cfd 100644 --- a/platformio/package/manager/library.py +++ b/platformio/package/manager/library.py @@ -15,18 +15,16 @@ import json import os -from platformio.package.exception import ( - MissingPackageManifestError, - UnknownPackageError, -) +from platformio.commands.lib.helpers import is_builtin_lib +from platformio.package.exception import MissingPackageManifestError from platformio.package.manager.base import BasePackageManager -from platformio.package.meta import PackageItem, PackageSpec, PackageType +from platformio.package.meta import PackageSpec, PackageType from platformio.project.config import ProjectConfig class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-ancestors def __init__(self, package_dir=None): - super(LibraryPackageManager, self).__init__( + super().__init__( PackageType.LIBRARY, package_dir or ProjectConfig.get_instance().get("platformio", "globallib_dir"), @@ -38,7 +36,7 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc def find_pkg_root(self, path, spec): try: - return super(LibraryPackageManager, self).find_pkg_root(path, spec) + return super().find_pkg_root(path, spec) except MissingPackageManifestError: pass assert isinstance(spec, PackageSpec) @@ -81,81 +79,12 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc return root return path - def _install( # pylint: 
disable=too-many-arguments - self, - spec, - search_filters=None, - silent=False, - skip_dependencies=False, - force=False, - ): - try: - return super(LibraryPackageManager, self)._install( - spec, - search_filters=search_filters, - silent=silent, - skip_dependencies=skip_dependencies, - force=force, - ) - except UnknownPackageError as e: - # pylint: disable=import-outside-toplevel - from platformio.commands.lib.helpers import is_builtin_lib - - spec = self.ensure_spec(spec) - if is_builtin_lib(spec.name): - self.print_message("Already installed, built-in library", fg="yellow") - return True - - raise e - - def install_dependencies(self, pkg, silent=False): - assert isinstance(pkg, PackageItem) - manifest = self.load_manifest(pkg) - if not manifest.get("dependencies"): - return - if not silent: - self.print_message("Installing dependencies...") - for dependency in manifest.get("dependencies"): - if not self._install_dependency(dependency, silent) and not silent: - self.print_message( - "Warning! Could not install dependency %s for package '%s'" - % (dependency, pkg.metadata.name), - fg="yellow", - ) - - def _install_dependency(self, dependency, silent=False): - spec = PackageSpec( - owner=dependency.get("owner"), - name=dependency.get("name"), - requirements=dependency.get("version"), - ) - search_filters = { - key: value - for key, value in dependency.items() - if key in ("authors", "platforms", "frameworks") - } - try: - return self._install( - spec, search_filters=search_filters or None, silent=silent - ) - except UnknownPackageError: - pass + def install_dependency(self, dependency): + spec = self.dependency_to_spec(dependency) + # skip built-in dependencies + not_builtin_conds = [spec.external, spec.owner] + if not any(not_builtin_conds): + not_builtin_conds.append(not is_builtin_lib(spec.name)) + if any(not_builtin_conds): + return super().install_dependency(dependency) return None - - def uninstall_dependencies(self, pkg, silent=False): - assert isinstance(pkg, PackageItem) - manifest = self.load_manifest(pkg) - if not manifest.get("dependencies"): - return - if not silent: - self.print_message("Removing dependencies...", fg="yellow") - for dependency in manifest.get("dependencies"): - spec = PackageSpec( - owner=dependency.get("owner"), - name=dependency.get("name"), - requirements=dependency.get("version"), - ) - pkg = self.get_package(spec) - if not pkg: - continue - self._uninstall(pkg, silent=silent) diff --git a/platformio/package/manager/platform.py b/platformio/package/manager/platform.py index 0b438018..6d0cc040 100644 --- a/platformio/package/manager/platform.py +++ b/platformio/package/manager/platform.py @@ -29,7 +29,7 @@ from platformio.project.config import ProjectConfig class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-ancestors def __init__(self, package_dir=None): self.config = ProjectConfig.get_instance() - super(PlatformPackageManager, self).__init__( + super().__init__( PackageType.PLATFORM, package_dir or self.config.get("platformio", "platforms_dir"), ) @@ -38,94 +38,71 @@ class PlatformPackageManager(BasePackageManager): # pylint: disable=too-many-an def manifest_names(self): return PackageType.get_manifest_map()[PackageType.PLATFORM] - def install( # pylint: disable=arguments-differ, too-many-arguments + def install( # pylint: disable=arguments-differ,too-many-arguments self, spec, - with_packages=None, - without_packages=None, - skip_default_package=False, - with_all_packages=False, - silent=False, + skip_dependencies=False, 
force=False, + project_env=None, + project_targets=None, ): - pkg = super(PlatformPackageManager, self).install( - spec, silent=silent, force=force, skip_dependencies=True - ) + already_installed = self.get_package(spec) + pkg = super().install(spec, force=force, skip_dependencies=True) try: p = PlatformFactory.new(pkg) + # set logging level for underlying tool manager + p.pm.set_log_level(self.log.getEffectiveLevel()) p.ensure_engine_compatible() except IncompatiblePlatform as e: - super(PlatformPackageManager, self).uninstall( - pkg, silent=silent, skip_dependencies=True - ) + super().uninstall(pkg, skip_dependencies=True) raise e - - if with_all_packages: - with_packages = list(p.packages) - - p.install_packages( - with_packages, - without_packages, - skip_default_package, - silent=silent, - force=force, - ) - p.install_python_packages() - p.on_installed() + if project_env: + p.configure_project_packages(project_env, project_targets) + if not skip_dependencies: + p.install_required_packages(force=force) + if not already_installed: + p.on_installed() return pkg - def uninstall(self, spec, silent=False, skip_dependencies=False): + def uninstall( # pylint: disable=arguments-differ + self, spec, skip_dependencies=False, project_env=None + ): pkg = self.get_package(spec) if not pkg or not pkg.metadata: raise UnknownPackageError(spec) p = PlatformFactory.new(pkg) - assert super(PlatformPackageManager, self).uninstall( - pkg, silent=silent, skip_dependencies=True - ) + # set logging level for underlying tool manager + p.pm.set_log_level(self.log.getEffectiveLevel()) + if project_env: + p.configure_project_packages(project_env) if not skip_dependencies: - p.uninstall_python_packages() - p.on_uninstalled() + p.uninstall_packages() + assert super().uninstall(pkg, skip_dependencies=True) + p.on_uninstalled() return pkg - def update( # pylint: disable=arguments-differ, too-many-arguments + def update( # pylint: disable=arguments-differ self, from_spec, to_spec=None, - only_check=False, - silent=False, - show_incompatible=True, - only_packages=False, + skip_dependencies=False, + project_env=None, ): pkg = self.get_package(from_spec) if not pkg or not pkg.metadata: raise UnknownPackageError(from_spec) + pkg = super().update( + from_spec, + to_spec, + ) p = PlatformFactory.new(pkg) - pkgs_before = [item.metadata.name for item in p.get_installed_packages()] - - new_pkg = None - missed_pkgs = set() - if not only_packages: - new_pkg = super(PlatformPackageManager, self).update( - from_spec, - to_spec, - only_check=only_check, - silent=silent, - show_incompatible=show_incompatible, - ) - p = PlatformFactory.new(new_pkg) - missed_pkgs = set(pkgs_before) & set(p.packages) - missed_pkgs -= set( - item.metadata.name for item in p.get_installed_packages() - ) - - p.update_packages(only_check) - - if missed_pkgs: - p.install_packages( - with_packages=list(missed_pkgs), skip_default_package=True - ) - - return new_pkg or pkg + # set logging level for underlying tool manager + p.pm.set_log_level(self.log.getEffectiveLevel()) + if project_env: + p.configure_project_packages(project_env) + if not skip_dependencies: + p.update_packages() + return pkg @util.memoized(expire="5s") def get_installed_boards(self): @@ -180,13 +157,13 @@ def remove_unnecessary_platform_packages(dry_run=False): core_packages = get_installed_core_packages() for platform in PlatformPackageManager().get_installed(): p = PlatformFactory.new(platform) - for pkg in p.get_installed_packages(with_optional=True): + for pkg in 
p.get_installed_packages(with_optional_versions=True): required.add(pkg) pm = ToolPackageManager() for pkg in pm.get_installed(): skip_conds = [ - pkg.metadata.spec.url, + pkg.metadata.spec.uri, os.path.isfile(os.path.join(pkg.path, ".piokeep")), pkg in required, pkg in core_packages, diff --git a/platformio/package/manager/tool.py b/platformio/package/manager/tool.py index 70a76377..0919a33e 100644 --- a/platformio/package/manager/tool.py +++ b/platformio/package/manager/tool.py @@ -19,9 +19,11 @@ from platformio.project.config import ProjectConfig class ToolPackageManager(BasePackageManager): # pylint: disable=too-many-ancestors def __init__(self, package_dir=None): - if not package_dir: - package_dir = ProjectConfig.get_instance().get("platformio", "packages_dir") - super(ToolPackageManager, self).__init__(PackageType.TOOL, package_dir) + super().__init__( + PackageType.TOOL, + package_dir + or ProjectConfig.get_instance().get("platformio", "packages_dir"), + ) @property def manifest_names(self): diff --git a/platformio/package/manifest/parser.py b/platformio/package/manifest/parser.py index 556339a7..a5e0f837 100644 --- a/platformio/package/manifest/parser.py +++ b/platformio/package/manifest/parser.py @@ -18,6 +18,7 @@ import json import os import re import tarfile +from urllib.parse import urlparse from platformio import util from platformio.clients.http import fetch_remote_content @@ -25,11 +26,6 @@ from platformio.compat import get_object_members, string_types from platformio.package.exception import ManifestParserError, UnknownManifestError from platformio.project.helpers import is_platformio_project -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - class ManifestFileType(object): PLATFORM_JSON = "platform.json" diff --git a/platformio/package/manifest/schema.py b/platformio/package/manifest/schema.py index 78bf43a7..c8f69e0c 100644 --- a/platformio/package/manifest/schema.py +++ b/platformio/package/manifest/schema.py @@ -15,6 +15,7 @@ # pylint: disable=too-many-ancestors import json +import re import marshmallow import requests @@ -25,37 +26,21 @@ from platformio.clients.http import fetch_remote_content from platformio.package.exception import ManifestValidationError from platformio.util import memoized -MARSHMALLOW_2 = marshmallow.__version_info__ < (3,) +class BaseSchema(Schema): + class Meta(object): # pylint: disable=no-init + unknown = marshmallow.EXCLUDE # pylint: disable=no-member -if MARSHMALLOW_2: - - class CompatSchema(Schema): - pass - -else: - - class CompatSchema(Schema): - class Meta(object): # pylint: disable=no-init - unknown = marshmallow.EXCLUDE # pylint: disable=no-member - - def handle_error(self, error, data, **_): # pylint: disable=arguments-differ - raise ManifestValidationError( - error.messages, - data, - error.valid_data if hasattr(error, "valid_data") else error.data, - ) - - -class BaseSchema(CompatSchema): def load_manifest(self, data): - if MARSHMALLOW_2: - data, errors = self.load(data) - if errors: - raise ManifestValidationError(errors, data, data) - return data return self.load(data) + def handle_error(self, error, data, **_): # pylint: disable=arguments-differ + raise ManifestValidationError( + error.messages, + data, + error.valid_data if hasattr(error, "valid_data") else error.data, + ) + class StrictSchema(BaseSchema): def handle_error(self, error, data, **_): # pylint: disable=arguments-differ @@ -66,8 +51,6 @@ class StrictSchema(BaseSchema): ] else: error.valid_data = None - if MARSHMALLOW_2: 
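# --- Editor's illustrative sketch (not part of this patch) -------------------
# With marshmallow 2 support removed, manifest validation goes through the
# single BaseSchema.load_manifest() path and raises ManifestValidationError on
# invalid input. Field values below are invented for the example:
from platformio.package.exception import ManifestValidationError
from platformio.package.manifest.schema import ManifestSchema

data = dict(name="ExampleLib", version="1.0.0", description="Demo library")
try:
    valid = ManifestSchema().load_manifest(data)
    print(valid["name"], valid["version"])
except ManifestValidationError as exc:
    print(exc.messages)
# -----------------------------------------------------------------------------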
- error.data = error.valid_data raise error @@ -76,9 +59,7 @@ class StrictListField(fields.List): self, value, attr, data, **kwargs ): try: - return super(StrictListField, self)._deserialize( - value, attr, data, **kwargs - ) + return super()._deserialize(value, attr, data, **kwargs) except ValidationError as exc: if exc.data: exc.data = [item for item in exc.data if item is not None] @@ -151,6 +132,21 @@ class ExampleSchema(StrictSchema): files = StrictListField(fields.Str, required=True) +# Fields + + +class ScriptField(fields.Field): + def _deserialize(self, value, attr, data, **kwargs): + if isinstance(value, (str, list)): + return value + raise ValidationError( + "Script value must be a command (string) or list of arguments" + ) + + +# Scheme + + class ManifestSchema(BaseSchema): # Required fields name = fields.Str( @@ -172,6 +168,10 @@ class ManifestSchema(BaseSchema): license = fields.Str(validate=validate.Length(min=1, max=255)) repository = fields.Nested(RepositorySchema) dependencies = fields.Nested(DependencySchema, many=True) + scripts = fields.Dict( + keys=fields.Str(validate=validate.OneOf(["postinstall", "preuninstall"])), + values=ScriptField(), + ) # library.json export = fields.Nested(ExportSchema) @@ -236,6 +236,12 @@ class ManifestSchema(BaseSchema): try: value = str(value) assert "." in value + # check leading zeros + try: + semantic_version.Version(value) + except ValueError as exc: + if "Invalid leading zero" in str(exc): + raise exc semantic_version.Version.coerce(value) except (AssertionError, ValueError): raise ValidationError( @@ -248,9 +254,18 @@ class ManifestSchema(BaseSchema): spdx = self.load_spdx_licenses() except requests.exceptions.RequestException: raise ValidationError("Could not load SPDX licenses for validation") - for item in spdx.get("licenses", []): - if item.get("licenseId") == value: - return True + known_ids = set(item.get("licenseId") for item in spdx.get("licenses", [])) + if value in known_ids: + return True + # parse license expression + # https://spdx.github.io/spdx-spec/SPDX-license-expressions/ + package_ids = [ + item.strip() + for item in re.sub(r"(\s+(?:OR|AND|WITH)\s+|[\(\)])", " ", value).split(" ") + if item.strip() + ] + if known_ids >= set(package_ids): + return True raise ValidationError( "Invalid SPDX license identifier. 
See valid identifiers at " "https://spdx.org/licenses/" @@ -259,7 +274,7 @@ class ManifestSchema(BaseSchema): @staticmethod @memoized(expire="1h") def load_spdx_licenses(): - version = "3.16" + version = "3.17" spdx_data_url = ( "https://raw.githubusercontent.com/spdx/license-list-data/" "v%s/json/licenses.json" % version diff --git a/platformio/package/meta.py b/platformio/package/meta.py index 309c5fd8..20199527 100644 --- a/platformio/package/meta.py +++ b/platformio/package/meta.py @@ -17,18 +17,15 @@ import os import re import tarfile from binascii import crc32 +from urllib.parse import urlparse import semantic_version +from platformio import fs from platformio.compat import get_object_members, hashlib_encode_data, string_types from platformio.package.manifest.parser import ManifestFileType from platformio.package.version import cast_version_to_semver -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - class PackageType(object): LIBRARY = "library" @@ -67,6 +64,10 @@ class PackageType(object): class PackageOutdatedResult(object): + UPDATE_INCREMENT_MAJOR = "major" + UPDATE_INCREMENT_MINOR = "minor" + UPDATE_INCREMENT_PATCH = "patch" + def __init__(self, current, latest=None, wanted=None, detached=False): self.current = current self.latest = latest @@ -91,7 +92,25 @@ class PackageOutdatedResult(object): and not isinstance(value, semantic_version.Version) ): value = cast_version_to_semver(str(value)) - return super(PackageOutdatedResult, self).__setattr__(name, value) + return super().__setattr__(name, value) + + @property + def update_increment_type(self): + if not self.current or not self.latest: + return None + patch_conds = [ + self.current.major == self.latest.major, + self.current.minor == self.latest.minor, + ] + if all(patch_conds): + return self.UPDATE_INCREMENT_PATCH + minor_conds = [ + self.current.major == self.latest.major, + self.current.major > 0, + ] + if all(minor_conds): + return self.UPDATE_INCREMENT_MINOR + return self.UPDATE_INCREMENT_MAJOR def is_outdated(self, allow_incompatible=False): if self.detached or not self.latest or self.current == self.latest: @@ -105,19 +124,19 @@ class PackageOutdatedResult(object): class PackageSpec(object): # pylint: disable=too-many-instance-attributes def __init__( # pylint: disable=redefined-builtin,too-many-arguments - self, raw=None, owner=None, id=None, name=None, requirements=None, url=None + self, raw=None, owner=None, id=None, name=None, requirements=None, uri=None ): self._requirements = None self.owner = owner self.id = id self.name = name - self.url = url + self.uri = uri self.raw = raw if requirements: try: self.requirements = requirements except ValueError as exc: - if not self.name or self.url or self.raw: + if not self.name or self.uri or self.raw: raise exc self.raw = "%s=%s" % (self.name, requirements) self._name_is_custom = False @@ -130,7 +149,7 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes self.id == other.id, self.name == other.name, self.requirements == other.requirements, - self.url == other.url, + self.uri == other.uri, ] ) @@ -138,19 +157,23 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes return crc32( hashlib_encode_data( "%s-%s-%s-%s-%s" - % (self.owner, self.id, self.name, self.requirements, self.url) + % (self.owner, self.id, self.name, self.requirements, self.uri) ) ) def __repr__(self): return ( "PackageSpec ".format(**self.as_dict()) + "requirements={requirements} 
uri={uri}>".format(**self.as_dict()) ) @property def external(self): - return bool(self.url) + return bool(self.uri) + + @property + def symlink(self): + return self.uri and self.uri.startswith("symlink://") @property def requirements(self): @@ -169,8 +192,8 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes def humanize(self): result = "" - if self.url: - result = self.url + if self.uri: + result = self.uri elif self.name: if self.owner: result = self.owner + "/" @@ -190,12 +213,12 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes id=self.id, name=self.name, requirements=str(self.requirements) if self.requirements else None, - url=self.url, + uri=self.uri, ) def as_dependency(self): - if self.url: - return self.raw or self.url + if self.uri: + return self.raw or self.uri result = "" if self.name: result = "%s/%s" % (self.owner, self.name) if self.owner else self.name @@ -219,30 +242,32 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes self._parse_custom_name, self._parse_id, self._parse_owner, - self._parse_url, + self._parse_uri, ) for parser in parsers: if raw is None: break raw = parser(raw) - # if name is not custom, parse it from URL - if not self.name and self.url: - self.name = self._parse_name_from_url(self.url) + # if name is not custom, parse it from URI + if not self.name and self.uri: + self.name = self._parse_name_from_uri(self.uri) elif raw: # the leftover is a package name self.name = raw @staticmethod def _parse_local_file(raw): - if raw.startswith("file://") or not any(c in raw for c in ("/", "\\")): + if raw.startswith(("file://", "symlink://")) or not any( + c in raw for c in ("/", "\\") + ): return raw if os.path.exists(raw): return "file://%s" % raw return raw def _parse_requirements(self, raw): - if "@" not in raw or raw.startswith("file://"): + if "@" not in raw or raw.startswith(("file://", "symlink://")): return raw tokens = raw.rsplit("@", 1) if any(s in tokens[1] for s in (":", "/")): @@ -276,14 +301,18 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes self.name = tokens[1].strip() return None - def _parse_url(self, raw): + def _parse_uri(self, raw): if not any(s in raw for s in ("@", ":", "/")): return raw - self.url = raw.strip() - parts = urlparse(self.url) + self.uri = raw.strip() + parts = urlparse(self.uri) - # if local file or valid URL with scheme vcs+protocol:// - if parts.scheme == "file" or "+" in parts.scheme or self.url.startswith("git+"): + # if local file or valid URI with scheme vcs+protocol:// + if ( + parts.scheme in ("file", "symlink://") + or "+" in parts.scheme + or self.uri.startswith("git+") + ): return None # parse VCS @@ -301,29 +330,29 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes in ("mbed.com", "os.mbed.com", "developer.mbed.org") ] if any(git_conditions): - self.url = "git+" + self.url + self.uri = "git+" + self.uri elif any(hg_conditions): - self.url = "hg+" + self.url + self.uri = "hg+" + self.uri return None @staticmethod - def _parse_name_from_url(url): - if url.endswith("/"): - url = url[:-1] + def _parse_name_from_uri(uri): + if uri.endswith("/"): + uri = uri[:-1] stop_chars = ["#", "?"] - if url.startswith("file://"): + if uri.startswith(("file://", "symlink://")): stop_chars.append("@") # detached path for c in stop_chars: - if c in url: - url = url[: url.index(c)] + if c in uri: + uri = uri[: uri.index(c)] # parse real repository name from Github - parts = urlparse(url) + parts = 
urlparse(uri) if parts.netloc == "github.com" and parts.path.count("/") > 2: return parts.path.split("/")[2] - name = os.path.basename(url) + name = os.path.basename(uri) if "." in name: return name.split(".", 1)[0].strip() return name @@ -387,11 +416,14 @@ class PackageMetaData(object): @staticmethod def load(path): - with open(path, encoding="utf8") as fp: - data = json.load(fp) - if data["spec"]: - data["spec"] = PackageSpec(**data["spec"]) - return PackageMetaData(**data) + data = fs.load_json(path) + if data["spec"]: + # legacy support for Core<5.3 packages + if "url" in data["spec"]: + data["spec"]["uri"] = data["spec"]["url"] + del data["spec"]["url"] + data["spec"] = PackageSpec(**data["spec"]) + return PackageMetaData(**data) class PackageItem(object): @@ -410,9 +442,13 @@ class PackageItem(object): ) def __eq__(self, other): - if not self.path or not other.path: - return self.path == other.path - return os.path.realpath(self.path) == os.path.realpath(other.path) + conds = [ + os.path.realpath(self.path) == os.path.realpath(other.path) + if self.path and other.path + else self.path == other.path, + self.metadata == other.metadata, + ] + return all(conds) def __hash__(self): return hash(os.path.realpath(self.path)) diff --git a/platformio/package/pack.py b/platformio/package/pack.py index 84d835ed..ed9a9a21 100644 --- a/platformio/package/pack.py +++ b/platformio/package/pack.py @@ -33,7 +33,12 @@ from platformio.package.unpack import FileUnpacker class PackagePacker(object): - INCLUDE_DEFAULT = ManifestFileType.items().values() + INCLUDE_DEFAULT = list(ManifestFileType.items().values()) + [ + "README", + "README.md", + "README.rst", + "LICENSE", + ] EXCLUDE_DEFAULT = [ # PlatformIO internal files PackageItem.METAFILE_NAME, @@ -83,6 +88,7 @@ class PackagePacker(object): "**/*.[jJ][pP][eE][gG]", "**/*.[pP][nN][gG]", "**/*.[gG][iI][fF]", + "**/*.[sS][vV][gG]", "**/*.[zZ][iI][pP]", "**/*.[gG][zZ]", "**/*.3[gG][pP]", @@ -125,6 +131,20 @@ class PackagePacker(object): ), ) + @staticmethod + def load_gitignore_filters(path): + result = [] + with open(path, encoding="utf8") as fp: + for line in fp.readlines(): + line = line.strip() + if not line or line.startswith(("#")): + continue + if line.startswith("!"): + result.append(f"+<{line[1:]}>") + else: + result.append(f"-<{line}>") + return result + def pack(self, dst=None): tmp_dir = tempfile.mkdtemp() try: @@ -156,7 +176,7 @@ class PackagePacker(object): elif os.path.isdir(dst): dst = os.path.join(dst, filename) - return self._create_tarball(src, dst, manifest) + return self.create_tarball(src, dst, manifest) finally: shutil.rmtree(tmp_dir) @@ -183,7 +203,7 @@ class PackagePacker(object): return src - def _create_tarball(self, src, dst, manifest): + def create_tarball(self, src, dst, manifest): include = manifest.get("export", {}).get("include") exclude = manifest.get("export", {}).get("exclude") # remap root @@ -224,11 +244,15 @@ class PackagePacker(object): result += ["-<%s>" % p for p in self.EXCLUDE_DEFAULT] # exclude items declared in manifest result += ["-<%s>" % p for p in exclude or []] + # apply extra excludes if no custom "export" field in manifest if (not include and not exclude) or isinstance( self.manifest_parser, LibraryPropertiesManifestParser ): result += ["-<%s>" % p for p in exclude_extra] - # automatically include manifests + if os.path.exists(os.path.join(src, ".gitignore")): + result += self.load_gitignore_filters(os.path.join(src, ".gitignore")) + + # always include manifests and relevant files result += ["+<%s>" % p 
for p in self.INCLUDE_DEFAULT] return result diff --git a/platformio/package/unpack.py b/platformio/package/unpack.py index f9e68ff8..d8544a25 100644 --- a/platformio/package/unpack.py +++ b/platformio/package/unpack.py @@ -57,9 +57,7 @@ class BaseArchiver(object): class TARArchiver(BaseArchiver): def __init__(self, archpath): - super(TARArchiver, self).__init__( - tarfile_open(archpath) # pylint: disable=consider-using-with - ) + super().__init__(tarfile_open(archpath)) # pylint: disable=consider-using-with def get_items(self): return self._afo.getmembers() @@ -68,7 +66,7 @@ class TARArchiver(BaseArchiver): return item.name @staticmethod - def is_link(item): + def is_link(item): # pylint: disable=arguments-differ return item.islnk() or item.issym() @staticmethod @@ -90,7 +88,7 @@ class TARArchiver(BaseArchiver): self.is_link(item) and self.is_bad_link(item, dest_dir), ] if not any(bad_conds): - super(TARArchiver, self).extract_item(item, dest_dir) + super().extract_item(item, dest_dir) else: click.secho( "Blocked insecure item `%s` from TAR archive" % item.name, @@ -101,9 +99,7 @@ class TARArchiver(BaseArchiver): class ZIPArchiver(BaseArchiver): def __init__(self, archpath): - super(ZIPArchiver, self).__init__( - ZipFile(archpath) # pylint: disable=consider-using-with - ) + super().__init__(ZipFile(archpath)) # pylint: disable=consider-using-with @staticmethod def preserve_permissions(item, dest_dir): @@ -119,7 +115,7 @@ class ZIPArchiver(BaseArchiver): ) @staticmethod - def is_link(_): + def is_link(_): # pylint: disable=arguments-differ return False def get_items(self): diff --git a/platformio/package/vcsclient.py b/platformio/package/vcsclient.py index adbcd6f5..ed7434b0 100644 --- a/platformio/package/vcsclient.py +++ b/platformio/package/vcsclient.py @@ -16,6 +16,7 @@ import os import re import subprocess import sys +from urllib.parse import urlparse from platformio import proc from platformio.package.exception import ( @@ -24,11 +25,6 @@ from platformio.package.exception import ( UserSideException, ) -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - class VCSBaseException(PackageException): pass @@ -51,7 +47,7 @@ class VCSClientFactory(object): if not type_: raise VCSBaseException("VCS: Unknown repository type %s" % remote_url) try: - obj = getattr(sys.modules[__name__], "%sClient" % type_.title())( + obj = getattr(sys.modules[__name__], "%sClient" % type_.capitalize())( src_dir, remote_url, tag, silent ) assert isinstance(obj, VCSClientBase) @@ -135,7 +131,7 @@ class GitClient(VCSClientBase): def __init__(self, *args, **kwargs): self.configure() - super(GitClient, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) @classmethod def configure(cls): @@ -217,12 +213,18 @@ class GitClient(VCSClientBase): return self.get_current_revision() branch = self.get_current_branch() if not branch: - return self.get_current_revision() - result = self.get_cmd_output(["ls-remote"]) + return None + + branch_ref = f"refs/heads/{branch}" + result = self.get_cmd_output(["ls-remote", self.remote_url, branch_ref]) + if not result: + return None + for line in result.split("\n"): - ref_pos = line.strip().find("refs/heads/" + branch) - if ref_pos > 0: - return line[:ref_pos].strip()[:7] + sha, ref = line.strip().split("\t") + if ref == branch_ref: + return sha[:7] + return None diff --git a/platformio/platform/_packages.py b/platformio/platform/_packages.py index 786f1efc..c741c790 100644 --- a/platformio/platform/_packages.py +++ 
b/platformio/platform/_packages.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from platformio.package.exception import UnknownPackageError from platformio.package.meta import PackageSpec @@ -37,11 +36,13 @@ class PlatformPackagesMixin(object): pkg = self.get_package(name) return str(pkg.metadata.version) if pkg else None - def get_installed_packages(self, with_optional=False): + def get_installed_packages(self, with_optional=True, with_optional_versions=False): result = [] - for name, options in self.packages.items(): + for name, options in dict(sorted(self.packages.items())).items(): + if not with_optional and options.get("optional"): + continue versions = [options.get("version")] - if with_optional: + if with_optional_versions: versions.extend(options.get("optionalVersions", [])) for version in versions: if not version: @@ -61,77 +62,26 @@ class PlatformPackagesMixin(object): continue item = {"name": pkg.metadata.name, "version": str(pkg.metadata.version)} if pkg.metadata.spec.external: - item["src_url"] = pkg.metadata.spec.url + item["src_url"] = pkg.metadata.spec.uri result.append(item) return result - def autoinstall_runtime_packages(self): + def install_package(self, name, spec=None, force=False): + return self.pm.install(spec or self.get_package_spec(name), force=force) + + def install_required_packages(self, force=False): for name, options in self.packages.items(): - if options.get("optional", False): + if options.get("optional"): continue - if self.get_package(name): - continue - self.pm.install(self.get_package_spec(name)) - return True + self.install_package(name, force=force) - def install_packages( # pylint: disable=too-many-arguments - self, - with_packages=None, - without_packages=None, - skip_default_package=False, - silent=False, - force=False, - ): - with_packages = set(self._find_pkg_names(with_packages or [])) - without_packages = set(self._find_pkg_names(without_packages or [])) - - upkgs = with_packages | without_packages - ppkgs = set(self.packages) - if not upkgs.issubset(ppkgs): - raise UnknownPackageError(", ".join(upkgs - ppkgs)) - - for name, options in self.packages.items(): - if name in without_packages: - continue - if name in with_packages or not ( - skip_default_package or options.get("optional", False) - ): - self.pm.install(self.get_package_spec(name), silent=silent, force=force) - - return True - - def _find_pkg_names(self, candidates): - result = [] - for candidate in candidates: - found = False - - # lookup by package types - for _name, _opts in self.packages.items(): - if _opts.get("type") == candidate: - result.append(_name) - found = True - - if ( - self.frameworks - and candidate.startswith("framework-") - and candidate[10:] in self.frameworks - ): - result.append(self.frameworks[candidate[10:]]["package"]) - found = True - - if not found: - result.append(candidate) - - return result - - def update_packages(self, only_check=False): + def uninstall_packages(self): for pkg in self.get_installed_packages(): - self.pm.update( - pkg, - to_spec=self.get_package_spec(pkg.metadata.name), - only_check=only_check, - show_incompatible=False, - ) + self.pm.uninstall(pkg) + + def update_packages(self): + for pkg in self.get_installed_packages(): + self.pm.update(pkg, to_spec=self.get_package_spec(pkg.metadata.name)) def are_outdated_packages(self): for pkg in self.get_installed_packages(): diff --git a/platformio/platform/_run.py b/platformio/platform/_run.py index b82475b7..5bac13ae 
100644 --- a/platformio/platform/_run.py +++ b/platformio/platform/_run.py @@ -13,22 +13,19 @@ # limitations under the License. import base64 +import json import os import re import sys +from urllib.parse import quote import click from platformio import app, fs, proc, telemetry -from platformio.compat import hashlib_encode_data, is_bytes +from platformio.compat import hashlib_encode_data from platformio.package.manager.core import get_core_package_dir from platformio.platform.exception import BuildScriptNotFound -try: - from urllib.parse import quote -except ImportError: - from urllib import quote - class PlatformRunMixin(object): @@ -36,13 +33,16 @@ class PlatformRunMixin(object): @staticmethod def encode_scons_arg(value): - data = base64.urlsafe_b64encode(hashlib_encode_data(value)) - return data.decode() if is_bytes(data) else data + if isinstance(value, (list, tuple, dict)): + value = json.dumps(value) + return base64.urlsafe_b64encode(hashlib_encode_data(value)).decode() @staticmethod def decode_scons_arg(data): - value = base64.urlsafe_b64decode(data) - return value.decode() if is_bytes(value) else value + value = base64.urlsafe_b64decode(data).decode() + if value.startswith(("[", "{")): + value = json.loads(value) + return value def run( # pylint: disable=too-many-arguments self, variables, targets, silent, verbose, jobs @@ -51,15 +51,8 @@ class PlatformRunMixin(object): assert isinstance(targets, list) self.ensure_engine_compatible() - - options = self.config.items(env=variables["pioenv"], as_dict=True) - if "framework" in options: - # support PIO Core 3.0 dev/platforms - options["pioframework"] = options["framework"] - self.configure_default_packages(options, targets) - self.autoinstall_runtime_packages() - - self._report_non_sensitive_data(options, targets) + self.configure_project_packages(variables["pioenv"], targets) + self._report_non_sensitive_data(variables["pioenv"], targets) self.silent = silent self.verbose = verbose or app.get_setting("force_verbose") @@ -79,14 +72,14 @@ class PlatformRunMixin(object): return result - def _report_non_sensitive_data(self, options, targets): - topts = options.copy() - topts["platform_packages"] = [ + def _report_non_sensitive_data(self, env, targets): + options = self.config.items(env=env, as_dict=True) + options["platform_packages"] = [ dict(name=item["name"], version=item["version"]) for item in self.dump_used_packages() ] - topts["platform"] = {"name": self.name, "version": self.version} - telemetry.send_run_environment(topts, targets) + options["platform"] = {"name": self.name, "version": self.version} + telemetry.send_run_environment(options, targets) def _run_scons(self, variables, targets, jobs): scons_dir = get_core_package_dir("tool-scons") diff --git a/platformio/platform/base.py b/platformio/platform/base.py index d5ce103d..49db0df4 100644 --- a/platformio/platform/base.py +++ b/platformio/platform/base.py @@ -13,12 +13,10 @@ # limitations under the License. 
import os -import subprocess -import click import semantic_version -from platformio import __version__, fs, proc +from platformio import __version__, fs from platformio.package.manager.tool import ToolPackageManager from platformio.package.version import pepver_to_semver from platformio.platform._packages import PlatformPackagesMixin @@ -104,18 +102,16 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub packages[spec.name].update(**options) return packages - @property - def python_packages(self): - return self._manifest.get("pythonPackages") - def ensure_engine_compatible(self): if not self.engines or "platformio" not in self.engines: return True core_spec = semantic_version.SimpleSpec(self.engines["platformio"]) if self.CORE_SEMVER in core_spec: return True - # PIO Core 5 is compatible with dev-platforms for PIO Core 2.0, 3.0, 4.0 - if any(semantic_version.Version.coerce(str(v)) in core_spec for v in (2, 3, 4)): + # PIO Core 6 is compatible with dev-platforms for PIO Core 2.0, 3.0, 4.0 + if any( + semantic_version.Version.coerce(str(v)) in core_spec for v in (2, 3, 4, 5) + ): return True raise IncompatiblePlatform(self.name, str(self.CORE_SEMVER), str(core_spec)) @@ -178,10 +174,16 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub def get_package_type(self, name): return self.packages[name].get("type") - def configure_default_packages(self, options, targets): + def configure_project_packages(self, env, targets=None): + options = self.config.items(env=env, as_dict=True) + if "framework" in options: + # support PIO Core 3.0 dev/platforms + options["pioframework"] = options["framework"] # override user custom packages self._custom_packages = options.get("platform_packages") + self.configure_default_packages(options, targets or []) + def configure_default_packages(self, options, targets): # enable used frameworks for framework in options.get("framework", []): if not self.frameworks: @@ -232,37 +234,3 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub def on_uninstalled(self): pass - - def install_python_packages(self): - if not self.python_packages: - return None - click.echo( - "Installing Python packages: %s" - % ", ".join(list(self.python_packages.keys())), - ) - args = [proc.get_pythonexe_path(), "-m", "pip", "install", "--upgrade"] - for name, requirements in self.python_packages.items(): - if any(c in requirements for c in ("<", ">", "=")): - args.append("%s%s" % (name, requirements)) - else: - args.append("%s==%s" % (name, requirements)) - try: - return subprocess.call(args) == 0 - except Exception as e: # pylint: disable=broad-except - click.secho( - "Could not install Python packages -> %s" % e, fg="red", err=True - ) - return None - - def uninstall_python_packages(self): - if not self.python_packages: - return - click.echo("Uninstalling Python packages") - args = [proc.get_pythonexe_path(), "-m", "pip", "uninstall", "--yes"] - args.extend(list(self.python_packages.keys())) - try: - subprocess.call(args) == 0 - except Exception as e: # pylint: disable=broad-except - click.secho( - "Could not install Python packages -> %s" % e, fg="red", err=True - ) diff --git a/platformio/platform/board.py b/platformio/platform/board.py index 65940962..2e998ac0 100644 --- a/platformio/platform/board.py +++ b/platformio/platform/board.py @@ -15,6 +15,7 @@ import os from platformio import fs, telemetry, util +from platformio.compat import MISSING from platformio.debug.exception import 
DebugInvalidOptionsError, DebugSupportError from platformio.exception import UserSideException from platformio.platform.exception import InvalidBoardManifest @@ -34,14 +35,14 @@ class PlatformBoardConfig(object): "Please specify name, url and vendor fields for " + manifest_path ) - def get(self, path, default=None): + def get(self, path, default=MISSING): try: value = self._manifest for k in path.split("."): value = value[k] return value except KeyError: - if default is not None: + if default != MISSING: return default raise KeyError("Invalid board option '%s'" % path) diff --git a/platformio/platform/factory.py b/platformio/platform/factory.py index 1aff6709..2df9e1b4 100644 --- a/platformio/platform/factory.py +++ b/platformio/platform/factory.py @@ -26,7 +26,7 @@ class PlatformFactory(object): @staticmethod def get_clsname(name): name = re.sub(r"[^\da-z\_]+", "", name, flags=re.I) - return "%s%sPlatform" % (name.upper()[0], name.lower()[1:]) + return "%sPlatform" % name.lower().capitalize() @staticmethod def load_module(name, path): @@ -36,28 +36,32 @@ class PlatformFactory(object): raise UnknownPlatform(name) @classmethod - def new(cls, pkg_or_spec): + def new(cls, pkg_or_spec, autoinstall=False) -> PlatformBase: # pylint: disable=import-outside-toplevel + from platformio.package.manager.platform import PlatformPackageManager platform_dir = None platform_name = None if isinstance(pkg_or_spec, PackageItem): platform_dir = pkg_or_spec.path platform_name = pkg_or_spec.metadata.name - elif os.path.isdir(pkg_or_spec): + elif isinstance(pkg_or_spec, (str, bytes)) and os.path.isdir(pkg_or_spec): platform_dir = pkg_or_spec else: - from platformio.package.manager.platform import PlatformPackageManager - pkg = PlatformPackageManager().get_package(pkg_or_spec) - if not pkg: - raise UnknownPlatform(pkg_or_spec) - platform_dir = pkg.path - platform_name = pkg.metadata.name + if pkg: + platform_dir = pkg.path + platform_name = pkg.metadata.name if not platform_dir or not os.path.isfile( os.path.join(platform_dir, "platform.json") ): + if autoinstall: + return cls.new( + PlatformPackageManager().install( + pkg_or_spec, skip_dependencies=True + ) + ) raise UnknownPlatform(pkg_or_spec) if not platform_name: diff --git a/platformio/proc.py b/platformio/proc.py index f041d61c..83943273 100644 --- a/platformio/proc.py +++ b/platformio/proc.py @@ -62,7 +62,7 @@ class BuildAsyncPipe(AsyncPipeBase): def __init__(self, line_callback, data_callback): self.line_callback = line_callback self.data_callback = data_callback - super(BuildAsyncPipe, self).__init__() + super().__init__() def do_reading(self): line = "" @@ -95,7 +95,7 @@ class BuildAsyncPipe(AsyncPipeBase): class LineBufferedAsyncPipe(AsyncPipeBase): def __init__(self, line_callback): self.line_callback = line_callback - super(LineBufferedAsyncPipe, self).__init__() + super().__init__() def do_reading(self): for line in iter(self._pipe_reader.readline, ""): diff --git a/platformio/project/commands/__init__.py b/platformio/project/commands/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/platformio/project/commands/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/platformio/project/commands/config.py b/platformio/project/commands/config.py new file mode 100644 index 00000000..b59ff005 --- /dev/null +++ b/platformio/project/commands/config.py @@ -0,0 +1,57 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import click +from tabulate import tabulate + +from platformio import fs +from platformio.project.config import ProjectConfig +from platformio.project.exception import NotPlatformIOProjectError +from platformio.project.helpers import is_platformio_project + + +@click.command("config", short_help="Show computed configuration") +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option("--json-output", is_flag=True) +def project_config_cmd(project_dir, json_output): + if not is_platformio_project(project_dir): + raise NotPlatformIOProjectError(project_dir) + with fs.cd(project_dir): + config = ProjectConfig.get_instance() + if json_output: + return click.echo(config.to_json()) + click.echo( + "Computed project configuration for %s" % click.style(project_dir, fg="cyan") + ) + for section, options in config.as_tuple(): + click.secho(section, fg="cyan") + click.echo("-" * len(section)) + click.echo( + tabulate( + [ + (name, "=", "\n".join(value) if isinstance(value, list) else value) + for name, value in options + ], + tablefmt="plain", + ) + ) + click.echo() + return None diff --git a/platformio/project/commands/init.py b/platformio/project/commands/init.py new file mode 100644 index 00000000..7c3b77cb --- /dev/null +++ b/platformio/project/commands/init.py @@ -0,0 +1,355 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
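# A minimal sketch (assumption: run from a directory containing
# "platformio.ini") of reading the same computed configuration that the
# `pio project config` command above prints, using only APIs that appear in
# this changeset: ProjectConfig.get_instance() and as_tuple().
import os

from platformio import fs
from platformio.project.config import ProjectConfig

project_dir = os.getcwd()
with fs.cd(project_dir):
    config = ProjectConfig.get_instance()
    # as_tuple() yields (section, [(option, value), ...]) pairs, the same
    # structure the command renders with tabulate.
    for section, options in config.as_tuple():
        print("[%s]" % section)
        for name, value in options:
            print(name, "=", "\n".join(value) if isinstance(value, list) else value)
        print()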
+ +# pylint: disable=line-too-long,too-many-arguments,too-many-locals + + +import json +import os + +import click + +from platformio import fs +from platformio.package.commands.install import install_project_dependencies +from platformio.package.manager.platform import PlatformPackageManager +from platformio.platform.exception import UnknownBoard +from platformio.project.config import ProjectConfig +from platformio.project.generator import ProjectGenerator +from platformio.project.helpers import is_platformio_project + + +def validate_boards(ctx, param, value): # pylint: disable=W0613 + pm = PlatformPackageManager() + for id_ in value: + try: + pm.board_config(id_) + except UnknownBoard: + raise click.BadParameter( + "`%s`. Please search for board ID using `platformio boards` " + "command" % id_ + ) + return value + + +@click.command("init", short_help="Initialize a project or update existing") +@click.option( + "--project-dir", + "-d", + default=os.getcwd, + type=click.Path( + exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True + ), +) +@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards) +@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ides())) +@click.option("-e", "--environment", help="Update existing environment") +@click.option("-O", "--project-option", multiple=True) +@click.option("--env-prefix", default="") +@click.option("--no-install-dependencies", is_flag=True) +@click.option("-s", "--silent", is_flag=True) +def project_init_cmd( + project_dir, + board, + ide, + environment, + project_option, + env_prefix, + no_install_dependencies, + silent, +): + is_new_project = not is_platformio_project(project_dir) + if is_new_project: + if not silent: + print_header(project_dir) + init_base_project(project_dir) + + if environment: + update_project_env(project_dir, environment, project_option) + elif board: + update_board_envs(project_dir, board, project_option, env_prefix) + + # resolve project dependencies + if not no_install_dependencies and (environment or board): + install_project_dependencies( + options=dict( + project_dir=project_dir, + environments=[environment] if environment else [], + silent=silent, + ) + ) + + if ide: + if not silent: + click.echo( + "Updating metadata for the %s IDE..." 
% click.style(ide, fg="cyan") + ) + with fs.cd(project_dir): + config = ProjectConfig.get_instance( + os.path.join(project_dir, "platformio.ini") + ) + config.validate() + ProjectGenerator(config, environment, ide, board).generate() + + if is_new_project: + init_cvs_ignore(project_dir) + + if not silent: + print_footer(is_new_project) + + +def print_header(project_dir): + if project_dir == os.getcwd(): + click.secho("\nThe current working directory ", fg="yellow", nl=False) + try: + click.secho(project_dir, fg="cyan", nl=False) + except UnicodeEncodeError: + click.secho(json.dumps(project_dir), fg="cyan", nl=False) + click.secho(" will be used for the project.", fg="yellow") + click.echo("") + + click.echo("The next files/directories have been created in ", nl=False) + try: + click.secho(project_dir, fg="cyan") + except UnicodeEncodeError: + click.secho(json.dumps(project_dir), fg="cyan") + click.echo("%s - Put project header files here" % click.style("include", fg="cyan")) + click.echo( + "%s - Put here project specific (private) libraries" + % click.style("lib", fg="cyan") + ) + click.echo("%s - Put project source files here" % click.style("src", fg="cyan")) + click.echo( + "%s - Project Configuration File" % click.style("platformio.ini", fg="cyan") + ) + + +def print_footer(is_new_project): + if is_new_project: + return click.secho( + "\nProject has been successfully initialized! Useful commands:\n" + "`pio run` - process/build project from the current directory\n" + "`pio run --target upload` or `pio run -t upload` " + "- upload firmware to a target\n" + "`pio run --target clean` - clean project (remove compiled files)" + "\n`pio run --help` - additional information", + fg="green", + ) + return click.secho( + "Project has been successfully updated!", + fg="green", + ) + + +def init_base_project(project_dir): + with fs.cd(project_dir): + config = ProjectConfig() + config.save() + dir_to_readme = [ + (config.get("platformio", "src_dir"), None), + (config.get("platformio", "include_dir"), init_include_readme), + (config.get("platformio", "lib_dir"), init_lib_readme), + (config.get("platformio", "test_dir"), init_test_readme), + ] + for (path, cb) in dir_to_readme: + if os.path.isdir(path): + continue + os.makedirs(path) + if cb: + cb(path) + + +def init_include_readme(include_dir): + with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp: + fp.write( + """ +This directory is intended for project header files. + +A header file is a file containing C declarations and macro definitions +to be shared between several project source files. You request the use of a +header file in your project source file (C, C++, etc) located in `src` folder +by including it, with the C preprocessing directive `#include'. + +```src/main.c + +#include "header.h" + +int main (void) +{ + ... +} +``` + +Including a header file produces the same results as copying the header file +into each source file that needs it. Such copying would be time-consuming +and error-prone. With a header file, the related declarations appear +in only one place. If they need to be changed, they can be changed in one +place, and programs that include the header file will automatically use the +new version when next recompiled. The header file eliminates the labor of +finding and changing all the copies as well as the risk that a failure to +find one copy will result in inconsistencies within a program. + +In C, the usual convention is to give header files names that end with `.h'. 
+It is most portable to use only letters, digits, dashes, and underscores in +header file names, and at most one dot. + +Read more about using header files in official GCC documentation: + +* Include Syntax +* Include Operation +* Once-Only Headers +* Computed Includes + +https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html +""", + ) + + +def init_lib_readme(lib_dir): + with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp: + fp.write( + """ +This directory is intended for project specific (private) libraries. +PlatformIO will compile them to static libraries and link into executable file. + +The source code of each library should be placed in a an own separate directory +("lib/your_library_name/[here are source files]"). + +For example, see a structure of the following two libraries `Foo` and `Bar`: + +|--lib +| | +| |--Bar +| | |--docs +| | |--examples +| | |--src +| | |- Bar.c +| | |- Bar.h +| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html +| | +| |--Foo +| | |- Foo.c +| | |- Foo.h +| | +| |- README --> THIS FILE +| +|- platformio.ini +|--src + |- main.c + +and a contents of `src/main.c`: +``` +#include +#include + +int main (void) +{ + ... +} + +``` + +PlatformIO Library Dependency Finder will find automatically dependent +libraries scanning project source files. + +More information about PlatformIO Library Dependency Finder +- https://docs.platformio.org/page/librarymanager/ldf.html +""", + ) + + +def init_test_readme(test_dir): + with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp: + fp.write( + """ +This directory is intended for PlatformIO Test Runner and project tests. + +Unit Testing is a software testing method by which individual units of +source code, sets of one or more MCU program modules together with associated +control data, usage procedures, and operating procedures, are tested to +determine whether they are fit for use. Unit testing finds problems early +in the development cycle. 
+ +More information about PlatformIO Unit Testing: +- https://docs.platformio.org/en/latest/advanced/unit-testing/index.html +""", + ) + + +def init_cvs_ignore(project_dir): + conf_path = os.path.join(project_dir, ".gitignore") + if os.path.isfile(conf_path): + return + with open(conf_path, mode="w", encoding="utf8") as fp: + fp.write(".pio\n") + + +def update_board_envs(project_dir, board_ids, project_option, env_prefix): + config = ProjectConfig( + os.path.join(project_dir, "platformio.ini"), parse_extra=False + ) + used_boards = [] + for section in config.sections(): + cond = [section.startswith("env:"), config.has_option(section, "board")] + if all(cond): + used_boards.append(config.get(section, "board")) + + pm = PlatformPackageManager() + modified = False + for id_ in board_ids: + board_config = pm.board_config(id_) + if id_ in used_boards: + continue + used_boards.append(id_) + modified = True + + envopts = {"platform": board_config["platform"], "board": id_} + # find default framework for board + frameworks = board_config.get("frameworks") + if frameworks: + envopts["framework"] = frameworks[0] + + for item in project_option: + if "=" not in item: + continue + _name, _value = item.split("=", 1) + envopts[_name.strip()] = _value.strip() + + section = "env:%s%s" % (env_prefix, id_) + config.add_section(section) + + for option, value in envopts.items(): + config.set(section, option, value) + + if modified: + config.save() + + +def update_project_env(project_dir, environment, project_option): + if not project_option: + return + config = ProjectConfig( + os.path.join(project_dir, "platformio.ini"), parse_extra=False + ) + + section = "env:%s" % environment + if not config.has_section(section): + config.add_section(section) + + for item in project_option: + if "=" not in item: + continue + _name, _value = item.split("=", 1) + config.set(section, _name.strip(), _value.strip()) + + config.save() diff --git a/platformio/project/commands/metadata.py b/platformio/project/commands/metadata.py new file mode 100644 index 00000000..06457574 --- /dev/null +++ b/platformio/project/commands/metadata.py @@ -0,0 +1,80 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
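# A hedged sketch of driving the `pio project init` flow shown above from
# Python via Click's test runner. The directory and board ID are assumptions:
# the directory must already exist, and the board ID must be resolvable by
# PlatformPackageManager.board_config() (see validate_boards above).
from click.testing import CliRunner

from platformio.project.commands.init import project_init_cmd

runner = CliRunner()
result = runner.invoke(
    project_init_cmd,
    [
        "--project-dir", "/tmp/demo-project",         # assumed existing directory
        "--board", "uno",                              # assumed known board ID
        "--project-option", "build_src_filter=+<*>",   # written into [env:uno]
        "--no-install-dependencies",
        "--silent",
    ],
)
print(result.exit_code, result.output)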
+ +import json +import os + +import click +from tabulate import tabulate + +from platformio import fs +from platformio.package.commands.install import install_project_dependencies +from platformio.project.config import ProjectConfig +from platformio.project.helpers import load_build_metadata + + +@click.command( + "metadata", short_help="Dump metadata intended for IDE extensions/plugins" +) +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True), +) +@click.option("-e", "--environment", "environments", multiple=True) +@click.option("--json-output", is_flag=True) +@click.option("--json-output-path", type=click.Path(resolve_path=True)) +def project_metadata_cmd(project_dir, environments, json_output, json_output_path): + with fs.cd(project_dir): + config = ProjectConfig.get_instance() + config.validate(environments) + environments = list(environments or config.envs()) + build_metadata = load_build_metadata(project_dir, environments) + + if not json_output: + install_project_dependencies( + options=dict( + project_dir=project_dir, + environments=environments, + ) + ) + click.echo() + + if json_output or json_output_path: + if json_output_path: + if os.path.isdir(json_output_path): + json_output_path = os.path.join(json_output_path, "metadata.json") + with open(json_output_path, mode="w", encoding="utf8") as fp: + json.dump(build_metadata, fp) + click.secho(f"Saved metadata to the {json_output_path}", fg="green") + if json_output: + click.echo(json.dumps(build_metadata)) + return + + for envname, metadata in build_metadata.items(): + click.echo("Environment: " + click.style(envname, fg="cyan", bold=True)) + click.echo("=" * (13 + len(envname))) + click.echo( + tabulate( + [ + (click.style(name, bold=True), "=", json.dumps(value, indent=2)) + for name, value in metadata.items() + ], + tablefmt="plain", + ) + ) + click.echo() + + return diff --git a/platformio/project/config.py b/platformio/project/config.py index 7f2efb03..9b7f43cf 100644 --- a/platformio/project/config.py +++ b/platformio/project/config.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import configparser import glob import json import os @@ -20,15 +21,10 @@ import re import click from platformio import fs -from platformio.compat import string_types +from platformio.compat import MISSING, string_types from platformio.project import exception from platformio.project.options import ProjectOptions -try: - import ConfigParser as ConfigParser -except ImportError: - import configparser as ConfigParser - CONFIG_HEADER = """ ; PlatformIO Project Configuration File ; @@ -42,9 +38,6 @@ CONFIG_HEADER = """ """ -MISSING = object() - - class ProjectConfigBase(object): INLINE_COMMENT_RE = re.compile(r"\s+;.*$") @@ -87,7 +80,7 @@ class ProjectConfigBase(object): self.expand_interpolations = expand_interpolations self.warnings = [] self._parsed = [] - self._parser = ConfigParser.ConfigParser(inline_comment_prefixes=("#", ";")) + self._parser = configparser.ConfigParser(inline_comment_prefixes=("#", ";")) if path and os.path.isfile(path): self.read(path, parse_extra) @@ -102,7 +95,7 @@ class ProjectConfigBase(object): self._parsed.append(path) try: self._parser.read(path, "utf-8") - except ConfigParser.Error as e: + except configparser.Error as e: raise exception.InvalidProjectConfError(path, str(e)) if not parse_extra: @@ -139,7 +132,7 @@ class ProjectConfigBase(object): renamed_options.update({name: option.name for name in option.oldnames}) for section in self._parser.sections(): - scope = section.split(":", 1)[0] + scope = self.get_section_scope(section) if scope not in ("platformio", "env"): continue for option in self._parser.options(section): @@ -171,6 +164,10 @@ class ProjectConfigBase(object): ) return True + @staticmethod + def get_section_scope(section): + return section.split(":", 1)[0] if ":" in section else section + def walk_options(self, root_section): extends_queue = ( ["env", root_section] if root_section.startswith("env:") else [root_section] @@ -202,7 +199,7 @@ class ProjectConfigBase(object): result.append(option) # handle system environment variables - scope = section.split(":", 1)[0] + scope = self.get_section_scope(section) for option_meta in ProjectOptions.values(): if option_meta.scope != scope or option_meta.name in result: continue @@ -240,9 +237,29 @@ class ProjectConfigBase(object): value = "\n" + value self._parser.set(section, option, value) - def getraw( # pylint: disable=too-many-branches - self, section, option, default=MISSING - ): + def getraw(self, section, option, default=MISSING): + try: + return self._getraw(section, option, default) + except configparser.NoOptionError as exc: + renamed_option = self._resolve_renamed_option(section, option) + if renamed_option: + return self._getraw(section, renamed_option, default) + raise exc + + def _resolve_renamed_option(self, section, old_name): + scope = self.get_section_scope(section) + if scope not in ("platformio", "env"): + return None + for option_meta in ProjectOptions.values(): + if ( + option_meta.oldnames + and option_meta.scope == scope + and old_name in option_meta.oldnames + ): + return option_meta.name + return None + + def _getraw(self, section, option, default): # pylint: disable=too-many-branches if not self.expand_interpolations: return self._parser.get(section, option) @@ -252,13 +269,15 @@ class ProjectConfigBase(object): value = self._parser.get(sec, option) break - option_meta = ProjectOptions.get("%s.%s" % (section.split(":", 1)[0], option)) + option_meta = ProjectOptions.get( + "%s.%s" % (self.get_section_scope(section), option) + ) if not option_meta: if value == MISSING: value = ( 
default if default != MISSING else self._parser.get(section, option) ) - return self._expand_interpolations(value) + return self._expand_interpolations(section, value) if option_meta.sysenvvar: envvar_value = os.getenv(option_meta.sysenvvar) @@ -281,23 +300,33 @@ class ProjectConfigBase(object): if value == MISSING: return None - return self._expand_interpolations(value) + return self._expand_interpolations(section, value) - def _expand_interpolations(self, value): + def _expand_interpolations(self, parent_section, value): if ( not value or not isinstance(value, string_types) or not all(["${" in value, "}" in value]) ): return value - return self.VARTPL_RE.sub(self._re_interpolation_handler, value) + return self.VARTPL_RE.sub( + lambda match: self._re_interpolation_handler(parent_section, match), value + ) - def _re_interpolation_handler(self, match): + def _re_interpolation_handler(self, parent_section, match): section, option = match.group(1), match.group(2) + # handle system environment variables if section == "sysenv": return os.getenv(option) + # handle ${this.*} + if section == "this": + section = parent_section + if option == "__env__": + assert parent_section.startswith("env:") + return parent_section[4:] + # handle nested calls try: - value = self.getraw(section, option) + value = self.get(section, option) except RecursionError: raise exception.ProjectOptionValueError( "Infinite recursion has been detected", option, section @@ -310,10 +339,12 @@ class ProjectConfigBase(object): value = None try: value = self.getraw(section, option, default) - except ConfigParser.Error as e: + except configparser.Error as e: raise exception.InvalidProjectConfError(self.path, str(e)) - option_meta = ProjectOptions.get("%s.%s" % (section.split(":", 1)[0], option)) + option_meta = ProjectOptions.get( + "%s.%s" % (self.get_section_scope(section), option) + ) if not option_meta: return value @@ -345,9 +376,16 @@ class ProjectConfigBase(object): def default_envs(self): return self.get("platformio", "default_envs", []) + def get_default_env(self): + default_envs = self.default_envs() + if default_envs: + return default_envs[0] + envs = self.envs() + return envs[0] if envs else None + def validate(self, envs=None, silent=False): if not os.path.isfile(self.path): - raise exception.NotPlatformIOProjectError(self.path) + raise exception.NotPlatformIOProjectError(os.path.dirname(self.path)) # check envs known = set(self.envs()) if not known: @@ -398,7 +436,7 @@ class ProjectConfig(ProjectConfigBase, ProjectConfigDirsMixin): def update(self, data, clear=False): assert isinstance(data, list) if clear: - self._parser = ConfigParser.ConfigParser() + self._parser = configparser.ConfigParser() for section, options in data: if not self._parser.has_section(section): self._parser.add_section(section) diff --git a/platformio/project/generator.py b/platformio/project/generator.py index 31d3620b..10efba7d 100644 --- a/platformio/project/generator.py +++ b/platformio/project/generator.py @@ -20,7 +20,7 @@ import bottle from platformio import fs, util from platformio.proc import where_is_program -from platformio.project.helpers import load_project_ide_data +from platformio.project.helpers import load_build_metadata class ProjectGenerator(object): @@ -93,7 +93,7 @@ class ProjectGenerator(object): # default env configuration tpl_vars.update(self.config.items(env=self.env_name, as_dict=True)) # build data - tpl_vars.update(load_project_ide_data(self.project_dir, self.env_name) or {}) + 
tpl_vars.update(load_build_metadata(self.project_dir, self.env_name) or {}) with fs.cd(self.project_dir): tpl_vars.update( diff --git a/platformio/project/helpers.py b/platformio/project/helpers.py index 2aab20b7..bc8d324a 100644 --- a/platformio/project/helpers.py +++ b/platformio/project/helpers.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json import os import subprocess from hashlib import sha1 @@ -120,7 +119,7 @@ def compute_project_checksum(config): return checksum.hexdigest() -def load_project_ide_data(project_dir, env_or_envs, cache=False): +def load_build_metadata(project_dir, env_or_envs, cache=False): assert env_or_envs env_names = env_or_envs if not isinstance(env_names, list): @@ -130,14 +129,18 @@ def load_project_ide_data(project_dir, env_or_envs, cache=False): result = _load_cached_project_ide_data(project_dir, env_names) if cache else {} missed_env_names = set(env_names) - set(result.keys()) if missed_env_names: - result.update(_load_project_ide_data(project_dir, missed_env_names)) + result.update(_load_build_metadata(project_dir, missed_env_names)) if not isinstance(env_or_envs, list) and env_or_envs in result: return result[env_or_envs] return result or None -def _load_project_ide_data(project_dir, env_names): +# Backward compatibiility with dev-platforms +load_project_ide_data = load_build_metadata + + +def _load_build_metadata(project_dir, env_names): # pylint: disable=import-outside-toplevel from platformio.commands.run.command import cli as cmd_run @@ -162,6 +165,5 @@ def _load_cached_project_ide_data(project_dir, env_names): for name in env_names: if not os.path.isfile(os.path.join(build_dir, name, "idedata.json")): continue - with open(os.path.join(build_dir, name, "idedata.json"), encoding="utf8") as fp: - result[name] = json.load(fp) + result[name] = fs.load_json(os.path.join(build_dir, name, "idedata.json")) return result diff --git a/platformio/project/options.py b/platformio/project/options.py index ee5fd603..02a8e580 100644 --- a/platformio/project/options.py +++ b/platformio/project/options.py @@ -403,7 +403,7 @@ ProjectOptions = OrderedDict( group="build", name="build_type", description="Project build configuration", - type=click.Choice(["release", "debug"]), + type=click.Choice(["release", "test", "debug"]), default="release", ), ConfigEnvOption( @@ -419,13 +419,14 @@ ProjectOptions = OrderedDict( ), ConfigEnvOption( group="build", - name="src_build_flags", + name="build_src_flags", + oldnames=["src_build_flags"], description=( "The same as `build_flags` but configures flags the only for " - "project source files (`src` folder)" + "project source files in the `src` folder" ), multiple=True, - sysenvvar="PLATFORMIO_SRC_BUILD_FLAGS", + sysenvvar="PLATFORMIO_BUILD_SRC_FLAGS", buildenvvar="SRC_BUILD_FLAGS", ), ConfigEnvOption( @@ -438,13 +439,14 @@ ProjectOptions = OrderedDict( ), ConfigEnvOption( group="build", - name="src_filter", + name="build_src_filter", + oldnames=["src_filter"], description=( - "Control which source files should be included/excluded from a " - "build process" + "Control which source files from the `src` folder should " + "be included/excluded from a build process" ), multiple=True, - sysenvvar="PLATFORMIO_SRC_FILTER", + sysenvvar="PLATFORMIO_BUILD_SRC_FILTER", buildenvvar="SRC_FILTER", default="+<*> -<.git/> -<.svn/>", ), @@ -646,6 +648,13 @@ ProjectOptions = OrderedDict( default=False, ), # Test + ConfigEnvOption( + group="test", + 
name="test_framework", + description="A unit testing framework", + type=click.Choice(["doctest", "googletest", "unity", "custom"]), + default="unity", + ), ConfigEnvOption( group="test", name="test_filter", @@ -668,19 +677,25 @@ ProjectOptions = OrderedDict( name="test_speed", description="A connection speed (baud rate) to communicate with a target device", type=click.INT, + default=115200, ), ConfigEnvOption( group="test", - name="test_transport", - description="A transport to communicate with a target device", - ), - ConfigEnvOption( - group="test", - name="test_build_project_src", - description="Build project source code in a pair with test code", + name="test_build_src", + oldnames=["test_build_project_src"], + description="Build main source code in pair with a test code", type=click.BOOL, default=False, ), + ConfigEnvOption( + group="test", + name="test_testing_command", + multiple=True, + description=( + "A custom testing command that runs test cases " + "and returns results to the standard output" + ), + ), # Debug ConfigEnvOption( group="debug", diff --git a/platformio/project/savedeps.py b/platformio/project/savedeps.py new file mode 100644 index 00000000..1c12bd3d --- /dev/null +++ b/platformio/project/savedeps.py @@ -0,0 +1,87 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +from platformio.compat import ci_strings_are_equal +from platformio.package.meta import PackageSpec +from platformio.project.config import ProjectConfig +from platformio.project.exception import InvalidProjectConfError + + +def pkg_to_save_spec(pkg, user_spec): + assert isinstance(user_spec, PackageSpec) + if user_spec.external: + return user_spec + return PackageSpec( + owner=pkg.metadata.spec.owner, + name=pkg.metadata.spec.name, + requirements=user_spec.requirements + or ( + ("^%s" % pkg.metadata.version) + if not pkg.metadata.version.build + else pkg.metadata.version + ), + ) + + +def save_project_dependencies( + project_dir, specs, scope, action="add", environments=None +): + config = ProjectConfig.get_instance(os.path.join(project_dir, "platformio.ini")) + config.validate(environments) + for env in config.envs(): + if environments and env not in environments: + continue + config.expand_interpolations = False + candidates = [] + try: + candidates = _ignore_deps_by_specs(config.get("env:" + env, scope), specs) + except InvalidProjectConfError: + pass + if action == "add": + candidates.extend(spec.as_dependency() for spec in specs) + if candidates: + result = [] + for item in candidates: + item = item.strip() + if item and item not in result: + result.append(item) + config.set("env:" + env, scope, result) + elif config.has_option("env:" + env, scope): + config.remove_option("env:" + env, scope) + config.save() + + +def _ignore_deps_by_specs(deps, specs): + result = [] + for dep in deps: + ignore_conditions = [] + depspec = PackageSpec(dep) + if depspec.external: + ignore_conditions.append(depspec in specs) + else: + for spec in specs: + if depspec.owner: + ignore_conditions.append( + ci_strings_are_equal(depspec.owner, spec.owner) + and ci_strings_are_equal(depspec.name, spec.name) + ) + else: + ignore_conditions.append( + ci_strings_are_equal(depspec.name, spec.name) + ) + if not any(ignore_conditions): + result.append(dep) + return result diff --git a/platformio/project/tpls/clion/CMakeListsPrivate.txt.tpl b/platformio/project/tpls/clion/CMakeListsPrivate.txt.tpl index b8695d0e..6297706f 100644 --- a/platformio/project/tpls/clion/CMakeListsPrivate.txt.tpl +++ b/platformio/project/tpls/clion/CMakeListsPrivate.txt.tpl @@ -8,7 +8,7 @@ % import os % import re % -% from platformio.project.helpers import load_project_ide_data +% from platformio.project.helpers import load_build_metadata % % def _normalize_path(path): % if project_dir in path: @@ -97,7 +97,7 @@ endif() % % ide_data = {} % if leftover_envs: -% ide_data = load_project_ide_data(project_dir, leftover_envs) +% ide_data = load_build_metadata(project_dir, leftover_envs) % end % % for env, data in ide_data.items(): diff --git a/platformio/project/tpls/qtcreator/.gitignore.tpl b/platformio/project/tpls/qtcreator/.gitignore.tpl new file mode 100644 index 00000000..0c1fe4af --- /dev/null +++ b/platformio/project/tpls/qtcreator/.gitignore.tpl @@ -0,0 +1,2 @@ +.pio +.qtc_clangd diff --git a/platformio/project/tpls/qtcreator/Makefile.tpl b/platformio/project/tpls/qtcreator/Makefile.tpl new file mode 100644 index 00000000..d6d63445 --- /dev/null +++ b/platformio/project/tpls/qtcreator/Makefile.tpl @@ -0,0 +1,12 @@ +all: + platformio -c qtcreator run + +# regenerate project files to reflect platformio.ini changes +project-update: + @echo "This will overwrite project metadata files. Are you sure? 
[y/N] " \ + && read ans && [ $${ans:-'N'} = 'y' ] + platformio project init --ide qtcreator + +# forward any other target (clean, build, etc.) to pio run +{{'%'}}: + platformio -c qtcreator run --target $* diff --git a/platformio/project/tpls/qtcreator/platformio.cflags.tpl b/platformio/project/tpls/qtcreator/platformio.cflags.tpl new file mode 100644 index 00000000..f09a94f9 --- /dev/null +++ b/platformio/project/tpls/qtcreator/platformio.cflags.tpl @@ -0,0 +1 @@ +{{cc_flags.replace('-mlongcalls', '-mlong-calls')}} diff --git a/platformio/project/tpls/qtcreator/platformio.config.tpl b/platformio/project/tpls/qtcreator/platformio.config.tpl new file mode 100644 index 00000000..936fdad0 --- /dev/null +++ b/platformio/project/tpls/qtcreator/platformio.config.tpl @@ -0,0 +1,8 @@ +% for define in defines: +% tokens = define.split("=", 1) +% if len(tokens) > 1: +#define {{tokens[0].strip()}} {{!tokens[1].strip()}} +% else: +#define {{define}} +% end +% end diff --git a/platformio/project/tpls/qtcreator/platformio.creator.tpl b/platformio/project/tpls/qtcreator/platformio.creator.tpl new file mode 100644 index 00000000..d0cc464e --- /dev/null +++ b/platformio/project/tpls/qtcreator/platformio.creator.tpl @@ -0,0 +1,2 @@ +[General] + diff --git a/platformio/project/tpls/qtcreator/platformio.cxxflags.tpl b/platformio/project/tpls/qtcreator/platformio.cxxflags.tpl new file mode 100644 index 00000000..c5b30511 --- /dev/null +++ b/platformio/project/tpls/qtcreator/platformio.cxxflags.tpl @@ -0,0 +1 @@ +{{cxx_flags.replace('-mlongcalls', '-mlong-calls')}} diff --git a/platformio/project/tpls/qtcreator/platformio.files.tpl b/platformio/project/tpls/qtcreator/platformio.files.tpl new file mode 100644 index 00000000..b5c02654 --- /dev/null +++ b/platformio/project/tpls/qtcreator/platformio.files.tpl @@ -0,0 +1,6 @@ +Makefile +platformio.ini +.gitignore +% for file in src_files: +{{file}} +% end diff --git a/platformio/project/tpls/qtcreator/platformio.includes.tpl b/platformio/project/tpls/qtcreator/platformio.includes.tpl new file mode 100644 index 00000000..435cab10 --- /dev/null +++ b/platformio/project/tpls/qtcreator/platformio.includes.tpl @@ -0,0 +1,4 @@ +./ +% for include in filter_includes(includes): +{{include}} +% end diff --git a/platformio/project/tpls/qtcreator/platformio.pro.tpl b/platformio/project/tpls/qtcreator/platformio.pro.tpl deleted file mode 100644 index 89774699..00000000 --- a/platformio/project/tpls/qtcreator/platformio.pro.tpl +++ /dev/null @@ -1,45 +0,0 @@ -% import re -% -% cpp_standards_remap = { -% "0x": "11", -% "1y": "14", -% "1z": "17", -% "2a": "20", -% "2b": "23" -% } - -win32 { - HOMEDIR += $$(USERPROFILE) -} -else { - HOMEDIR += $$(HOME) -} - -% for include in filter_includes(includes): -% if include.startswith(user_home_dir): -INCLUDEPATH += "$${HOMEDIR}{{include.replace(user_home_dir, "")}}" -% else: -INCLUDEPATH += "{{include}}" -% end -% end - -% for define in defines: -% tokens = define.split("##", 1) -DEFINES += "{{tokens[0].strip()}}" -% end - -OTHER_FILES += platformio.ini - -% for file in src_files: -% if file.endswith((".h", ".hpp")): -HEADERS += {{file}} -% else: -SOURCES += {{file}} -% end -% end - -% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)") -% cxx_stds = STD_RE.findall(cxx_flags) -% if cxx_stds: -CONFIG += c++{{ cpp_standards_remap.get(cxx_stds[-1], cxx_stds[-1]) }} -% end diff --git a/platformio/shared.py b/platformio/shared.py new file mode 100644 index 00000000..ead093ce --- /dev/null +++ b/platformio/shared.py @@ -0,0 +1,28 @@ +# Copyright (c) 
2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint: disable=unused-import + +from platformio.device.filters.base import DeviceMonitorFilterBase +from platformio.device.list import list_serial_ports +from platformio.fs import to_unix_path +from platformio.platform.base import PlatformBase +from platformio.project.config import ProjectConfig +from platformio.project.helpers import load_build_metadata +from platformio.test.result import TestCase, TestCaseSource, TestStatus +from platformio.test.runners.base import TestRunnerBase +from platformio.test.runners.doctest import DoctestTestCaseParser +from platformio.test.runners.googletest import GoogletestTestRunner +from platformio.test.runners.unity import UnityTestRunner +from platformio.util import get_systype diff --git a/platformio/telemetry.py b/platformio/telemetry.py index ed2bf9b0..2792d5c6 100644 --- a/platformio/telemetry.py +++ b/platformio/telemetry.py @@ -16,6 +16,7 @@ import atexit import hashlib import json import os +import queue import re import shutil import sys @@ -32,11 +33,6 @@ from platformio.compat import hashlib_encode_data, string_types from platformio.proc import is_ci, is_container from platformio.project.helpers import is_platformio_project -try: - import queue -except ImportError: - import Queue as queue - class TelemetryBase(object): def __init__(self): @@ -68,7 +64,7 @@ class MeasurementProtocol(TelemetryBase): } def __init__(self): - super(MeasurementProtocol, self).__init__() + super().__init__() self["v"] = 1 self["tid"] = self.TID self["cid"] = app.get_cid() @@ -86,12 +82,12 @@ class MeasurementProtocol(TelemetryBase): def __getitem__(self, name): if name in self.PARAMS_MAP: name = self.PARAMS_MAP[name] - return super(MeasurementProtocol, self).__getitem__(name) + return super().__getitem__(name) def __setitem__(self, name, value): if name in self.PARAMS_MAP: name = self.PARAMS_MAP[name] - super(MeasurementProtocol, self).__setitem__(name, value) + super().__setitem__(name, value) def _prefill_appinfo(self): self["av"] = __version__ diff --git a/platformio/test/__init__.py b/platformio/test/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/platformio/test/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
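The new "platformio/shared.py" module above gives extra scripts and plugins a single import point for the public helpers touched in this release. A minimal sketch, assuming it is run from a project root containing "platformio.ini" (collecting the metadata shells out to the regular build pipeline):

```python
import os

from platformio.shared import ProjectConfig, get_systype, load_build_metadata

print("Host system:", get_systype())

config = ProjectConfig.get_instance()   # picks up ./platformio.ini
default_env = config.get_default_env()  # helper added in this release
if default_env:
    # Collects include paths, defines, flags, etc. for the environment,
    # the same data that `pio project metadata` dumps.
    metadata = load_build_metadata(os.getcwd(), default_env)
    print("Metadata keys for", default_env, "->", sorted(metadata))
```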
diff --git a/platformio/test/command.py b/platformio/test/command.py new file mode 100644 index 00000000..cfc2ea3d --- /dev/null +++ b/platformio/test/command.py @@ -0,0 +1,202 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import shutil + +import click + +from platformio import app, exception, fs, util +from platformio.project.config import ProjectConfig +from platformio.test.helpers import list_test_suites +from platformio.test.reports.base import TestReportFactory +from platformio.test.result import TestResult, TestStatus +from platformio.test.runners.base import TestRunnerOptions +from platformio.test.runners.factory import TestRunnerFactory + + +@click.command("test", short_help="Unit Testing") +@click.option("--environment", "-e", multiple=True) +@click.option( + "--filter", + "-f", + multiple=True, + metavar="PATTERN", + help="Filter tests by a pattern", +) +@click.option( + "--ignore", + "-i", + multiple=True, + metavar="PATTERN", + help="Ignore tests by a pattern", +) +@click.option("--upload-port") +@click.option("--test-port") +@click.option( + "-d", + "--project-dir", + default=os.getcwd, + type=click.Path( + exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True + ), +) +@click.option( + "-c", + "--project-conf", + type=click.Path( + exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True + ), +) +@click.option("--without-building", is_flag=True) +@click.option("--without-uploading", is_flag=True) +@click.option("--without-testing", is_flag=True) +@click.option("--no-reset", is_flag=True) +@click.option( + "--monitor-rts", + default=None, + type=click.IntRange(0, 1), + help="Set initial RTS line state for Serial Monitor", +) +@click.option( + "--monitor-dtr", + default=None, + type=click.IntRange(0, 1), + help="Set initial DTR line state for Serial Monitor", +) +@click.option( + "-a", + "--program-arg", + "program_args", + multiple=True, + help="A program argument (multiple are allowed)", +) +@click.option("--list-tests", is_flag=True) +@click.option("--json-output-path", type=click.Path(resolve_path=True)) +@click.option("--junit-output-path", type=click.Path(resolve_path=True)) +@click.option("--verbose", "-v", is_flag=True) +@click.pass_context +def test_cmd( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin + ctx, + environment, + ignore, + filter, + upload_port, + test_port, + project_dir, + project_conf, + without_building, + without_uploading, + without_testing, + no_reset, + monitor_rts, + monitor_dtr, + program_args, + list_tests, + json_output_path, + junit_output_path, + verbose, +): + app.set_session_var("custom_project_conf", project_conf) + + with fs.cd(project_dir): + project_config = ProjectConfig.get_instance(project_conf) + project_config.validate(envs=environment) + + test_result = TestResult(project_dir) + test_suites = list_test_suites( + project_config, environments=environment, filters=filter, ignores=ignore + ) + test_names = 
sorted(set(s.test_name for s in test_suites)) + + if not verbose: + click.echo("Verbose mode can be enabled via `-v, --verbose` option") + click.secho("Collected %d tests" % len(test_names), bold=True, nl=not verbose) + if verbose: + click.echo(" (%s)" % ", ".join(test_names)) + + for test_suite in test_suites: + test_result.add_suite(test_suite) + if list_tests or test_suite.is_finished(): # skipped by user + continue + runner = TestRunnerFactory.new( + test_suite, + project_config, + TestRunnerOptions( + verbose=verbose, + without_building=without_building, + without_uploading=without_uploading, + without_testing=without_testing, + upload_port=upload_port, + test_port=test_port, + no_reset=no_reset, + monitor_rts=monitor_rts, + monitor_dtr=monitor_dtr, + program_args=program_args, + ), + ) + click.echo() + print_suite_header(test_suite) + runner.start(ctx) + print_suite_footer(test_suite) + + # Reset custom project config + app.set_session_var("custom_project_conf", None) + + stdout_report = TestReportFactory.new("stdout", test_result) + stdout_report.generate(verbose=verbose or list_tests) + + for output_format, output_path in [ + ("json", json_output_path), + ("junit", junit_output_path), + ]: + if not output_path: + continue + custom_report = TestReportFactory.new(output_format, test_result) + custom_report.generate(output_path=output_path, verbose=True) + + if test_result.is_errored or test_result.get_status_nums(TestStatus.FAILED): + raise exception.ReturnErrorCode(1) + + +def print_suite_header(test_suite): + click.echo( + "Processing %s in %s environment" + % ( + click.style(test_suite.test_name, fg="yellow", bold=True), + click.style(test_suite.env_name, fg="cyan", bold=True), + ) + ) + terminal_width, _ = shutil.get_terminal_size() + click.secho("-" * terminal_width, bold=True) + + +def print_suite_footer(test_suite): + is_error = test_suite.status in (TestStatus.FAILED, TestStatus.ERRORED) + util.print_labeled_bar( + "%s [%s] Took %.2f seconds" + % ( + click.style( + "%s:%s" % (test_suite.env_name, test_suite.test_name), bold=True + ), + ( + click.style(test_suite.status.name, fg="red", bold=True) + if is_error + else click.style("PASSED", fg="green", bold=True) + ), + test_suite.duration, + ), + is_error=is_error, + sep="-", + ) diff --git a/platformio/test/exception.py b/platformio/test/exception.py new file mode 100644 index 00000000..2d8c790c --- /dev/null +++ b/platformio/test/exception.py @@ -0,0 +1,34 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
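Because ``test_cmd`` in ``platformio/test/command.py`` above is a regular Click command, it can also be exercised without the ``pio`` entry point, for example from a CI helper script. A rough sketch using Click's own test runner; the project path is hypothetical and the command still needs a valid ``platformio.ini``::

    from click.testing import CliRunner

    from platformio.test.command import test_cmd

    runner = CliRunner()
    # --list-tests only collects and prints the test hierarchy, so nothing is
    # built or uploaded; "-d" points at a (hypothetical) project directory.
    result = runner.invoke(test_cmd, ["--list-tests", "-d", "/path/to/project"])
    print(result.output)

    # A failed or errored run raises ReturnErrorCode(1) inside the command,
    # which CliRunner surfaces as a non-zero exit code.
    print(result.exit_code)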
+ +from platformio.exception import PlatformioException, UserSideException + + +class UnitTestError(PlatformioException): + pass + + +class TestDirNotExistsError(UnitTestError, UserSideException): + + MESSAGE = ( + "A test folder '{0}' does not exist.\nPlease create 'test' " + "directory in the project root and put a test set.\n" + "More details about Unit " + "Testing: https://docs.platformio.org/page/plus/" + "unit-testing.html" + ) + + +class UnitTestSuiteError(UnitTestError): + pass diff --git a/platformio/test/helpers.py b/platformio/test/helpers.py new file mode 100644 index 00000000..a8e2f818 --- /dev/null +++ b/platformio/test/helpers.py @@ -0,0 +1,62 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from fnmatch import fnmatch + +from platformio.test.exception import TestDirNotExistsError +from platformio.test.result import TestSuite + + +def list_test_names(project_config): + test_dir = project_config.get("platformio", "test_dir") + if not os.path.isdir(test_dir): + raise TestDirNotExistsError(test_dir) + names = [] + for root, _, __ in os.walk(test_dir): + if not os.path.basename(root).startswith("test_"): + continue + names.append(os.path.relpath(root, test_dir).replace("\\", "/")) + if not names: + names = ["*"] + return names + + +def list_test_suites(project_config, environments, filters, ignores): + result = [] + default_envs = project_config.default_envs() + test_names = list_test_names(project_config) + for env_name in project_config.envs(): + for test_name in test_names: + + # filter and ignore patterns + patterns = dict(filter=list(filters), ignore=list(ignores)) + for key in patterns: + if patterns[key]: # overriden from CLI + continue + patterns[key].extend( + project_config.get(f"env:{env_name}", f"test_{key}", []) + ) + + skip_conditions = [ + environments and env_name not in environments, + not environments and default_envs and env_name not in default_envs, + test_name != "*" + and patterns["filter"] + and not any(fnmatch(test_name, p) for p in patterns["filter"]), + test_name != "*" + and any(fnmatch(test_name, p) for p in patterns["ignore"]), + ] + result.append(TestSuite(env_name, test_name, finished=any(skip_conditions))) + return result diff --git a/platformio/test/reports/__init__.py b/platformio/test/reports/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/platformio/test/reports/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
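``list_test_suites()`` in ``platformio/test/helpers.py`` above combines the ``--filter``/``--ignore`` CLI patterns (which take precedence) with the per-environment ``test_filter``/``test_ignore`` options and matches them against hierarchical test names with ``fnmatch``. A small illustration of how such patterns select tests; the names and patterns are made up::

    from fnmatch import fnmatch

    test_names = ["test_calc/test_add", "test_calc/test_div", "test_driver"]
    filters = ["test_calc/*"]   # e.g. from "pio test --filter 'test_calc/*'"
    ignores = ["*_div"]         # e.g. from test_ignore in platformio.ini

    selected = [
        name
        for name in test_names
        if any(fnmatch(name, p) for p in filters)
        and not any(fnmatch(name, p) for p in ignores)
    ]
    print(selected)   # ['test_calc/test_add']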
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/platformio/test/reports/base.py b/platformio/test/reports/base.py new file mode 100644 index 00000000..78244aef --- /dev/null +++ b/platformio/test/reports/base.py @@ -0,0 +1,34 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import importlib + +from platformio.test.result import TestResult + + +class TestReportBase: + def __init__(self, test_result): + self.test_result = test_result + + def generate(self, output_path, verbose): + raise NotImplementedError() + + +class TestReportFactory: + @staticmethod + def new(format, test_result) -> TestReportBase: # pylint: disable=redefined-builtin + assert isinstance(test_result, TestResult) + mod = importlib.import_module(f"platformio.test.reports.{format}") + report_cls = getattr(mod, "%sTestReport" % format.lower().capitalize()) + return report_cls(test_result) diff --git a/platformio/test/reports/json.py b/platformio/test/reports/json.py new file mode 100644 index 00000000..c22fb587 --- /dev/null +++ b/platformio/test/reports/json.py @@ -0,0 +1,99 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
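``TestReportFactory.new()`` above resolves a format name by convention (module ``platformio.test.reports.<format>``, class ``<Format>TestReport``). A short sketch of generating the bundled reports by hand, mirroring what the ``pio test`` command does for ``--json-output-path``/``--junit-output-path``; ``test_result`` is assumed to be an already populated ``TestResult`` instance::

    from platformio.test.reports.base import TestReportFactory

    # "test_result" is assumed to be a populated TestResult
    # (see platformio/test/result.py later in this changeset).
    for fmt in ("json", "junit"):
        report = TestReportFactory.new(fmt, test_result)
        # when output_path is a directory, each report picks a timestamped file name
        report.generate(output_path="reports/", verbose=True)

    # The console summary writes no file and overrides generate() accordingly.
    TestReportFactory.new("stdout", test_result).generate(verbose=True)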
+ +import datetime +import json +import os + +import click + +from platformio.test.reports.base import TestReportBase +from platformio.test.result import TestStatus + + +class JsonTestReport(TestReportBase): + def generate(self, output_path, verbose=False): + if os.path.isdir(output_path): + output_path = os.path.join( + output_path, + "pio-test-report-%s-%s.json" + % ( + os.path.basename(self.test_result.project_dir), + datetime.datetime.now().strftime("%Y%m%d%H%M%S"), + ), + ) + + with open(output_path, mode="w", encoding="utf8") as fp: + json.dump(self.to_json(), fp) + + if verbose: + click.secho(f"Saved JSON report to the {output_path}", fg="green") + + def to_json(self): + result = dict( + version="1.0", + project_dir=self.test_result.project_dir, + duration=self.test_result.duration, + testcase_nums=self.test_result.case_nums, + error_nums=self.test_result.get_status_nums(TestStatus.ERRORED), + failure_nums=self.test_result.get_status_nums(TestStatus.FAILED), + skipped_nums=self.test_result.get_status_nums(TestStatus.SKIPPED), + test_suites=[], + ) + for test_suite in self.test_result.suites: + result["test_suites"].append(self.test_suite_to_json(test_suite)) + return result + + def test_suite_to_json(self, test_suite): + result = dict( + env_name=test_suite.env_name, + test_name=test_suite.test_name, + status=test_suite.status.name, + duration=test_suite.duration, + timestamp=datetime.datetime.fromtimestamp(test_suite.timestamp).strftime( + "%Y-%m-%dT%H:%M:%S" + ) + if test_suite.timestamp + else None, + testcase_nums=len(test_suite.cases), + error_nums=test_suite.get_status_nums(TestStatus.ERRORED), + failure_nums=test_suite.get_status_nums(TestStatus.FAILED), + skipped_nums=test_suite.get_status_nums(TestStatus.SKIPPED), + test_cases=[], + ) + for test_case in test_suite.cases: + result["test_cases"].append(self.test_case_to_json(test_case)) + return result + + @staticmethod + def test_case_to_json(test_case): + result = dict( + name=test_case.name, + status=test_case.status.name, + message=test_case.message, + stdout=test_case.stdout, + duration=test_case.duration, + exception=None, + source=None, + ) + if test_case.exception: + result["exception"] = "%s: %s" % ( + test_case.exception.__class__.__name__, + test_case.exception, + ) + if test_case.source: + result["source"] = dict( + file=test_case.source.filename, line=test_case.source.line + ) + return result diff --git a/platformio/test/reports/junit.py b/platformio/test/reports/junit.py new file mode 100644 index 00000000..34e177f6 --- /dev/null +++ b/platformio/test/reports/junit.py @@ -0,0 +1,107 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
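The JSON layout produced by ``JsonTestReport.to_json()`` above is straightforward to post-process in CI. A sketch of reading such a report and printing the failing cases; the file name is hypothetical and would be whatever was passed to ``--json-output-path``::

    import json

    with open("pio-test-report.json", encoding="utf8") as fp:  # hypothetical path
        report = json.load(fp)

    print("cases:", report["testcase_nums"], "failures:", report["failure_nums"])
    for suite in report["test_suites"]:
        for case in suite["test_cases"]:
            if case["status"] != "FAILED":
                continue
            where = case["source"] or {}
            print(
                f'{suite["env_name"]}:{suite["test_name"]}',
                case["name"],
                f'{where.get("file")}:{where.get("line")}',
                case["message"],
            )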
+ +import datetime +import os +import xml.etree.ElementTree as ET + +import click + +from platformio import __version__ +from platformio.test.reports.base import TestReportBase +from platformio.test.result import TestStatus + + +class JunitTestReport(TestReportBase): + def generate(self, output_path, verbose=False): + if os.path.isdir(output_path): + output_path = os.path.join( + output_path, + "pio-test-report-%s-%s-junit.xml" + % ( + os.path.basename(self.test_result.project_dir), + datetime.datetime.now().strftime("%Y%m%d%H%M%S"), + ), + ) + + with open(output_path, mode="wb") as fp: + self.build_xml_tree().write(fp, encoding="utf8") + + if verbose: + click.secho(f"Saved JUnit report to the {output_path}", fg="green") + + def build_xml_tree(self): + root = ET.Element("testsuites") + root.set("name", self.test_result.project_dir) + root.set("platformio_version", __version__) + root.set("tests", str(self.test_result.case_nums)) + root.set("errors", str(self.test_result.get_status_nums(TestStatus.ERRORED))) + root.set("failures", str(self.test_result.get_status_nums(TestStatus.FAILED))) + root.set("time", str(self.test_result.duration)) + for suite in self.test_result.suites: + root.append(self.build_testsuite_node(suite)) + return ET.ElementTree(root) + + def build_testsuite_node(self, test_suite): + element = ET.Element("testsuite") + element.set("name", f"{test_suite.env_name}:{test_suite.test_name}") + element.set("tests", str(len(test_suite.cases))) + element.set("errors", str(test_suite.get_status_nums(TestStatus.ERRORED))) + element.set("failures", str(test_suite.get_status_nums(TestStatus.FAILED))) + element.set("skipped", str(test_suite.get_status_nums(TestStatus.SKIPPED))) + element.set("time", str(test_suite.duration)) + if test_suite.timestamp: + element.set( + "timestamp", + datetime.datetime.fromtimestamp(test_suite.timestamp).strftime( + "%Y-%m-%dT%H:%M:%S" + ), + ) + for test_case in test_suite.cases: + element.append(self.build_testcase_node(test_case)) + return element + + def build_testcase_node(self, test_case): + element = ET.Element("testcase") + element.set("name", str(test_case.name)) + element.set("time", str(test_case.duration)) + element.set("status", str(test_case.status.name)) + if test_case.source: + element.set("file", test_case.source.filename) + element.set("line", str(test_case.source.line)) + if test_case.status == TestStatus.SKIPPED: + element.append(ET.Element("skipped")) + elif test_case.status == TestStatus.ERRORED: + element.append(self.build_testcase_error_node(test_case)) + elif test_case.status == TestStatus.FAILED: + element.append(self.build_testcase_failure_node(test_case)) + return element + + @staticmethod + def build_testcase_error_node(test_case): + element = ET.Element("error") + element.set("type", test_case.exception.__class__.__name__) + element.set("message", str(test_case.exception)) + if test_case.stdout: + element.text = test_case.stdout + return element + + @staticmethod + def build_testcase_failure_node(test_case): + element = ET.Element("failure") + if test_case.message: + element.set("message", test_case.message) + if test_case.stdout: + element.text = test_case.stdout + return element diff --git a/platformio/test/reports/stdout.py b/platformio/test/reports/stdout.py new file mode 100644 index 00000000..2f37bf4a --- /dev/null +++ b/platformio/test/reports/stdout.py @@ -0,0 +1,99 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import click +from tabulate import tabulate + +from platformio import util +from platformio.test.reports.base import TestReportBase +from platformio.test.result import TestStatus + + +class StdoutTestReport(TestReportBase): + def generate(self, verbose=False): # pylint: disable=arguments-differ + click.echo() + + tabular_data = [] + failed_nums = self.test_result.get_status_nums(TestStatus.FAILED) + skipped_nums = self.test_result.get_status_nums(TestStatus.SKIPPED) + is_error = failed_nums > 0 or self.test_result.is_errored + + for test_suite in self.test_result.suites: + if not verbose and test_suite.status == TestStatus.SKIPPED: + continue + status_str = test_suite.status.name + if test_suite.status in (TestStatus.FAILED, TestStatus.ERRORED): + status_str = click.style(status_str, fg="red") + elif test_suite.status == TestStatus.PASSED: + status_str = click.style(status_str, fg="green") + + tabular_data.append( + ( + click.style(test_suite.env_name, fg="cyan"), + test_suite.test_name, + status_str, + util.humanize_duration_time(test_suite.duration or None), + ) + ) + + if tabular_data: + util.print_labeled_bar( + "SUMMARY", + is_error=is_error, + fg="red" if is_error else "green", + ) + click.echo( + tabulate( + tabular_data, + headers=[ + click.style(s, bold=True) + for s in ("Environment", "Test", "Status", "Duration") + ], + ), + err=is_error, + ) + + if failed_nums: + self.print_failed_test_cases() + + util.print_labeled_bar( + "%d test cases: %s%s%d succeeded in %s" + % ( + self.test_result.case_nums, + ("%d failed, " % failed_nums) if failed_nums else "", + ("%d skipped, " % skipped_nums) if skipped_nums else "", + self.test_result.get_status_nums(TestStatus.PASSED), + util.humanize_duration_time(self.test_result.duration), + ), + is_error=is_error, + fg="red" if is_error else "green", + ) + + def print_failed_test_cases(self): + click.echo() + for test_suite in self.test_result.suites: + if test_suite.status != TestStatus.FAILED: + continue + util.print_labeled_bar( + click.style( + f"{test_suite.env_name}:{test_suite.test_name}", bold=True, fg="red" + ), + is_error=True, + sep="_", + ) + for test_case in test_suite.cases: + if test_case.status != TestStatus.FAILED: + continue + click.echo((test_case.stdout or "").strip()) + click.echo() diff --git a/platformio/test/result.py b/platformio/test/result.py new file mode 100644 index 00000000..b2000b2e --- /dev/null +++ b/platformio/test/result.py @@ -0,0 +1,165 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
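The result model that follows (``TestSuite``/``TestCase``/``TestResult``) can be driven directly, which is exactly what the runners later in this changeset do while parsing output. A minimal, self-contained sketch; the environment, test names, and project path are made up::

    from platformio.test.result import TestCase, TestResult, TestStatus, TestSuite

    suite = TestSuite("native", "test_calc")   # env name, test name (hypothetical)
    suite.on_start()
    suite.add_case(TestCase("addition", TestStatus.PASSED))
    suite.add_case(TestCase("division", TestStatus.FAILED, message="expected 3, was 4"))
    suite.on_finish()

    result = TestResult("/path/to/project")    # hypothetical project dir
    result.add_suite(suite)

    print(suite.status.name)                           # FAILED: any failed case wins
    print(result.get_status_nums(TestStatus.FAILED))   # 1
    print(result.is_errored)                           # False: nothing ERRORED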
+ +import enum +import functools +import operator +import time + +import click + + +class TestStatus(enum.Enum): + PASSED = enum.auto() + FAILED = enum.auto() + SKIPPED = enum.auto() + WARNED = enum.auto() + ERRORED = enum.auto() + + @classmethod + def from_string(cls, value: str): + value = value.lower() + if value.startswith(("failed", "fail")): + return cls.FAILED + if value.startswith(("passed", "pass", "success", "ok")): + return cls.PASSED + if value.startswith(("skipped", "skip", "ignore", "ignored")): + return cls.SKIPPED + if value.startswith("WARNING"): + return cls.WARNED + raise ValueError(f"Unknown test status `{value}`") + + def to_ansi_color(self): + if self == TestStatus.FAILED: + return "red" + if self == TestStatus.PASSED: + return "green" + return "yellow" + + +class TestCaseSource: + def __init__(self, filename, line=None): + self.filename = filename + self.line = line + + +class TestCase: + def __init__( # pylint: disable=too-many-arguments + self, + name, + status, + message=None, + stdout=None, + source=None, + duration=0, + exception=None, + ): + assert isinstance(status, TestStatus) + if status == TestStatus.ERRORED: + assert isinstance(exception, Exception) + self.name = name.strip() + self.status = status + self.message = message + self.stdout = stdout + self.source = source + self.duration = duration + self.exception = exception + + def humanize(self): + parts = [] + if self.source: + parts.append("%s:%d: " % (self.source.filename, self.source.line)) + parts.append(self.name) + if self.message: + parts.append(": " + self.message) + parts.extend( + [ + "\t", + "[%s]" % click.style(self.status.name, fg=self.status.to_ansi_color()), + ] + ) + return "".join(parts) + + +class TestSuite: + def __init__(self, env_name, test_name, finished=False): + self.env_name = env_name + self.test_name = test_name + self.timestamp = 0 + self.duration = 0 + self._cases = [] + self._finished = finished + + @property + def cases(self): + return self._cases + + @property + def status(self): + for s in (TestStatus.ERRORED, TestStatus.FAILED): + if self.get_status_nums(s): + return s + if self._cases and any(c.status == TestStatus.PASSED for c in self._cases): + return TestStatus.PASSED + return TestStatus.SKIPPED + + def get_status_nums(self, status): + return len([True for c in self._cases if c.status == status]) + + def add_case(self, case: TestCase): + assert isinstance(case, TestCase) + self._cases.append(case) + + def is_finished(self): + return self._finished + + def on_start(self): + self.timestamp = time.time() + + def on_finish(self): + if self.is_finished(): + return + self._finished = True + self.duration = time.time() - self.timestamp + + +class TestResult: + def __init__(self, project_dir): + self.project_dir = project_dir + self._suites = [] + + @property + def suites(self): + return self._suites + + def add_suite(self, suite): + assert isinstance(suite, TestSuite) + self._suites.append(suite) + + @property + def duration(self): + return functools.reduce(operator.add, [s.duration for s in self._suites]) + + @property + def case_nums(self): + return functools.reduce(operator.add, [len(s.cases) for s in self._suites]) + + @property + def is_errored(self): + return any(s.status == TestStatus.ERRORED for s in self._suites) + + def get_status_nums(self, status): + return functools.reduce( + operator.add, [s.get_status_nums(status) for s in self._suites] + ) diff --git a/platformio/test/runners/__init__.py b/platformio/test/runners/__init__.py new file mode 100644 index 
00000000..b0514903 --- /dev/null +++ b/platformio/test/runners/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/platformio/test/runners/base.py b/platformio/test/runners/base.py new file mode 100644 index 00000000..ead621e5 --- /dev/null +++ b/platformio/test/runners/base.py @@ -0,0 +1,208 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import click + +from platformio.exception import ReturnErrorCode +from platformio.platform.factory import PlatformFactory +from platformio.test.exception import UnitTestSuiteError +from platformio.test.result import TestCase, TestStatus +from platformio.test.runners.readers.program import ProgramTestOutputReader +from platformio.test.runners.readers.serial import SerialTestOutputReader + +CTX_META_TEST_IS_RUNNING = __name__ + ".test_running" +CTX_META_TEST_RUNNING_NAME = __name__ + ".test_running_name" + + +class TestRunnerOptions: # pylint: disable=too-many-instance-attributes + def __init__( # pylint: disable=too-many-arguments + self, + verbose=False, + without_building=False, + without_uploading=False, + without_testing=False, + without_debugging=True, + upload_port=None, + test_port=None, + no_reset=False, + monitor_rts=None, + monitor_dtr=None, + program_args=None, + ): + self.verbose = verbose + self.without_building = without_building + self.without_uploading = without_uploading + self.without_testing = without_testing + self.without_debugging = without_debugging + self.upload_port = upload_port + self.test_port = test_port + self.no_reset = no_reset + self.monitor_rts = monitor_rts + self.monitor_dtr = monitor_dtr + self.program_args = program_args + + +class TestRunnerBase: + + NAME = None + EXTRA_LIB_DEPS = None + TESTCASE_PARSE_RE = None + + def __init__(self, test_suite, project_config, options=None): + self.test_suite = test_suite + self.options = options + self.project_config = project_config + self.platform = PlatformFactory.new( + self.project_config.get(f"env:{self.test_suite.env_name}", "platform"), + autoinstall=True, + ) + self.cmd_ctx = None + self._testing_output_buffer = "" + + @property + def name(self): + return self.__class__.__name__.replace("TestRunner", "").lower() + + def get_test_speed(self): + return int( + self.project_config.get(f"env:{self.test_suite.env_name}", "test_speed") + ) + + def get_test_port(self): + return self.options.test_port or self.project_config.get( + f"env:{self.test_suite.env_name}", 
"test_port" + ) + + def start(self, cmd_ctx): + # setup command context + self.cmd_ctx = cmd_ctx + self.cmd_ctx.meta[CTX_META_TEST_IS_RUNNING] = True + if self.test_suite.test_name != "*": + self.cmd_ctx.meta[CTX_META_TEST_RUNNING_NAME] = self.test_suite.test_name + + self.test_suite.on_start() + try: + self.setup() + for stage in ("building", "uploading", "testing"): + getattr(self, f"stage_{stage}")() + except Exception as exc: # pylint: disable=broad-except + click.secho(str(exc), fg="red", err=True) + self.test_suite.add_case( + TestCase( + name=f"{self.test_suite.env_name}:{self.test_suite.test_name}", + status=TestStatus.ERRORED, + exception=exc, + ) + ) + finally: + self.test_suite.on_finish() + self.teardown() + + def setup(self): + pass + + def stage_building(self): + if self.options.without_building: + return None + click.secho("Building...", bold=True) + targets = ["__test"] + if not self.options.without_debugging: + targets.append("__debug") + if self.platform.is_embedded(): + targets.append("checkprogsize") + try: + return self.run_project_targets(targets) + except ReturnErrorCode: + raise UnitTestSuiteError( + "Building stage has failed, see errors above. " + "Use `pio test --verbose` option to enable verbose output." + ) + + def stage_uploading(self): + if self.options.without_uploading or not self.platform.is_embedded(): + return None + click.secho("Uploading...", bold=True) + targets = ["upload"] + if self.options.without_building: + targets.append("nobuild") + else: + targets.append("__test") + if not self.options.without_debugging: + targets.append("__debug") + try: + return self.run_project_targets(targets) + except ReturnErrorCode: + raise UnitTestSuiteError( + "Uploading stage has failed, see errors above. " + "Use `pio test --verbose` option to enable verbose output." 
+ ) + + def stage_testing(self): + if self.options.without_testing: + return None + click.secho("Testing...", bold=True) + test_port = self.get_test_port() + program_conds = [ + not self.platform.is_embedded() + and (not test_port or "://" not in test_port), + self.project_config.get( + f"env:{self.test_suite.env_name}", "test_testing_command" + ), + ] + reader = ( + ProgramTestOutputReader(self) + if any(program_conds) + else SerialTestOutputReader(self) + ) + return reader.begin() + + def teardown(self): + pass + + def run_project_targets(self, targets): + # pylint: disable=import-outside-toplevel + from platformio.commands.run.command import cli as run_cmd + + assert self.cmd_ctx + return self.cmd_ctx.invoke( + run_cmd, + project_conf=self.project_config.path, + upload_port=self.options.upload_port, + verbose=self.options.verbose, + silent=not self.options.verbose, + environment=[self.test_suite.env_name], + disable_auto_clean="nobuild" in targets, + target=targets, + ) + + def configure_build_env(self, env): # pylint: disable=no-self-use + """ + Configure SCons build environment + Called in "builder/tools/piotest" tool + """ + return env + + def on_testing_data_output(self, data): + if isinstance(data, bytes): + data = data.decode("utf8", "ignore") + self._testing_output_buffer += data + self._testing_output_buffer = self._testing_output_buffer.replace("\r", "") + while "\n" in self._testing_output_buffer: + nl_pos = self._testing_output_buffer.index("\n") + line = self._testing_output_buffer[: nl_pos + 1] + self._testing_output_buffer = self._testing_output_buffer[nl_pos + 1 :] + self.on_testing_line_output(line) + + def on_testing_line_output(self, line): # pylint: disable=no-self-use + click.echo(line, nl=False) diff --git a/platformio/test/runners/doctest.py b/platformio/test/runners/doctest.py new file mode 100644 index 00000000..15f6b311 --- /dev/null +++ b/platformio/test/runners/doctest.py @@ -0,0 +1,127 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
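All runners receive raw device or program output through ``TestRunnerBase.on_testing_data_output()`` above, which buffers incoming chunks until a full line is available and drops carriage returns before handing complete lines to the framework-specific parsers. A standalone sketch of the same buffering approach (not the class itself, to avoid constructing a platform)::

    def feed(buffer, chunk, on_line):
        """Mimics the approach of TestRunnerBase.on_testing_data_output:
        accumulate chunks, drop CRs, emit complete newline-terminated lines."""
        if isinstance(chunk, bytes):
            chunk = chunk.decode("utf8", "ignore")
        buffer += chunk.replace("\r", "")
        while "\n" in buffer:
            nl_pos = buffer.index("\n")
            on_line(buffer[: nl_pos + 1])
            buffer = buffer[nl_pos + 1 :]
        return buffer

    lines = []
    buf = ""
    for chunk in (b"test/test_foo.cpp:44:test_f", b"oo:PASS\r\npartial"):
        buf = feed(buf, chunk, lines.append)
    print(lines)   # ['test/test_foo.cpp:44:test_foo:PASS\n']; "partial" stays buffered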
+ +import click + +from platformio.test.result import TestCase, TestCaseSource, TestStatus +from platformio.test.runners.base import TestRunnerBase + + +class DoctestTestCaseParser: + def __init__(self): + self._tmp_tc = None + self._name_tokens = [] + + def parse(self, line): + if self.is_divider(line): + return self._on_divider() + if not self._tmp_tc or line.strip().startswith("[doctest]"): + return None + + self._tmp_tc.stdout += line + line = line.strip() + + # source + if not self._tmp_tc.source and line: + self._tmp_tc.source = self.parse_source(line) + return None + + # name + if not self._tmp_tc.name: + if line: + self._name_tokens.append(line) + return None + self._tmp_tc.name = self.parse_name(self._name_tokens) + return None + + if self._tmp_tc.status != TestStatus.FAILED: + self._parse_assert(line) + + return None + + @staticmethod + def is_divider(line): + line = line.strip() + return line.startswith("===") and line.endswith("===") + + def _on_divider(self): + test_case = None + if self._tmp_tc: + test_case = TestCase( + name=self._tmp_tc.name.strip(), + status=self._tmp_tc.status, + message=(self._tmp_tc.message or "").strip() or None, + source=self._tmp_tc.source, + stdout=self._tmp_tc.stdout.strip(), + ) + + self._tmp_tc = TestCase("", TestStatus.PASSED, stdout="") + self._name_tokens = [] + return test_case + + @staticmethod + def parse_source(line): + if not line.endswith(":"): + return None + filename, line = line[:-1].rsplit(":", 1) + return TestCaseSource(filename, int(line)) + + @staticmethod + def parse_name(tokens): + cleaned_tokens = [] + for token in tokens: + if token.startswith("TEST ") and ":" in token: + token = token[token.index(":") + 1 :] + cleaned_tokens.append(token.strip()) + return "/".join(cleaned_tokens) + + def _parse_assert(self, line): + status_tokens = [ + (TestStatus.FAILED, "ERROR"), + (TestStatus.FAILED, "FATAL ERROR"), + (TestStatus.WARNED, "WARNING"), + ] + for status, token in status_tokens: + index = line.find(": %s:" % token) + if index == -1: + continue + self._tmp_tc.status = status + self._tmp_tc.message = line[index + len(token) + 3 :].strip() or None + + +class DoctestTestRunner(TestRunnerBase): + + EXTRA_LIB_DEPS = ["doctest/doctest@^2.4.8"] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._tc_parser = DoctestTestCaseParser() + + def configure_build_env(self, env): + env.Append(CPPDEFINES=["DOCTEST_CONFIG_COLORS_NONE"]) + if self.platform.is_embedded(): + return + env.Append(CXXFLAGS=["-std=c++11"]) + + def on_testing_line_output(self, line): + if self.options.verbose: + click.echo(line, nl=False) + + test_case = self._tc_parser.parse(line) + if test_case: + click.echo(test_case.humanize()) + self.test_suite.add_case(test_case) + + if "[doctest] Status:" in line: + self.test_suite.on_finish() diff --git a/platformio/test/runners/factory.py b/platformio/test/runners/factory.py new file mode 100644 index 00000000..6c428316 --- /dev/null +++ b/platformio/test/runners/factory.py @@ -0,0 +1,67 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
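When an environment sets ``test_framework = custom``, the factory defined below loads ``test_custom_runner.py`` from the test folder (or from the specific test's folder) and expects a ``CustomTestRunner`` class in it. A bare-bones hypothetical runner; a real one would implement an actual output protocol in ``on_testing_line_output()`` or override the build/upload/test stages::

    # test/test_custom_runner.py (hypothetical), picked up when an environment
    # sets "test_framework = custom" in platformio.ini
    from platformio.test.result import TestCase, TestStatus
    from platformio.test.runners.base import TestRunnerBase


    class CustomTestRunner(TestRunnerBase):
        # extra libraries could be injected via EXTRA_LIB_DEPS = [...]

        def configure_build_env(self, env):
            # called from the "piotest" SCons tool; adjust flags here if needed
            return env

        def on_testing_line_output(self, line):
            # toy protocol: any line containing "OK" counts as a passed case
            if "OK" in line:
                self.test_suite.add_case(TestCase(line.strip(), TestStatus.PASSED))
            if "DONE" in line:
                self.test_suite.on_finish()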
+# See the License for the specific language governing permissions and +# limitations under the License. + +import importlib +import os +import re + +from platformio.compat import load_python_module +from platformio.exception import UserSideException +from platformio.project.config import ProjectConfig +from platformio.test.result import TestSuite +from platformio.test.runners.base import TestRunnerBase, TestRunnerOptions + + +class TestRunnerFactory(object): + @staticmethod + def get_clsname(name): + name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I) + return "%sTestRunner" % name.lower().capitalize() + + @classmethod + def new(cls, test_suite, project_config, options=None) -> TestRunnerBase: + assert isinstance(test_suite, TestSuite) + assert isinstance(project_config, ProjectConfig) + if options: + assert isinstance(options, TestRunnerOptions) + test_framework = project_config.get( + f"env:{test_suite.env_name}", "test_framework" + ) + module_name = f"platformio.test.runners.{test_framework}" + runner_cls = None + if test_framework == "custom": + test_dir = project_config.get("platformio", "test_dir") + custom_runner_path = os.path.join(test_dir, "test_custom_runner.py") + test_name = test_suite.test_name if test_suite.test_name != "*" else None + while test_name: + if os.path.isfile( + os.path.join(test_dir, test_name, "test_custom_runner.py") + ): + custom_runner_path = os.path.join( + test_dir, test_name, "test_custom_runner.py" + ) + break + test_name = os.path.dirname(test_name) # parent dir + + try: + mod = load_python_module(module_name, custom_runner_path) + except (FileNotFoundError, ImportError): + raise UserSideException( + "Could not find custom test runner " + f"by this path -> {custom_runner_path}" + ) + else: + mod = importlib.import_module(module_name) + runner_cls = getattr(mod, cls.get_clsname(test_framework)) + return runner_cls(test_suite, project_config, options) diff --git a/platformio/test/runners/googletest.py b/platformio/test/runners/googletest.py new file mode 100644 index 00000000..b3687bad --- /dev/null +++ b/platformio/test/runners/googletest.py @@ -0,0 +1,118 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import re + +import click + +from platformio.compat import IS_WINDOWS +from platformio.test.result import TestCase, TestCaseSource, TestStatus +from platformio.test.runners.base import TestRunnerBase + + +class DoctestTestCaseParser: + + # Examples: + # [ RUN ] FooTest.Bar + # ... 
+ # [ FAILED ] FooTest.Bar (0 ms) + STATUS__NAME_RE = r"^\[\s+(?P[A-Z]+)\s+\]\s+(?P[^\(\s]+)" + + # Examples: + # [ RUN ] FooTest.Bar + # test/test_gtest/test_main.cpp:26: Failure + # Y:\core\examples\unit-testing\googletest\test\test_gtest\test_main.cpp:26: Failure + SOURCE_MESSAGE_RE = r"^(?P.+):(?P\d+):(?P.*)$" + + def __init__(self): + self._tmp_tc = None + + def parse(self, line): + if self._tmp_tc: + self._tmp_tc.stdout += line + return self._parse_test_case(line) + + def _parse_test_case(self, line): + status, name = self._parse_status_and_name(line) + if status == "RUN": + self._tmp_tc = TestCase(name, TestStatus.PASSED, stdout=line) + return None + if not status or not self._tmp_tc: + return None + source, message = self._parse_source_and_message(self._tmp_tc.stdout) + test_case = TestCase( + name=self._tmp_tc.name, + status=TestStatus.from_string(status), + message=message, + source=source, + stdout=self._tmp_tc.stdout.strip(), + ) + self._tmp_tc = None + return test_case + + def _parse_status_and_name(self, line): + result = (None, None) + line = line.strip() + if not line.startswith("["): + return result + match = re.search(self.STATUS__NAME_RE, line) + if not match: + return result + return match.group("status"), match.group("name") + + def _parse_source_and_message(self, stdout): + for line in stdout.split("\n"): + line = line.strip() + if not line: + continue + match = re.search(self.SOURCE_MESSAGE_RE, line) + if not match: + continue + return ( + TestCaseSource( + match.group("source_file"), int(match.group("source_line")) + ), + (match.group("message") or "").strip() or None, + ) + return (None, None) + + +class GoogletestTestRunner(TestRunnerBase): + + EXTRA_LIB_DEPS = ["google/googletest@^1.11.0"] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._tc_parser = DoctestTestCaseParser() + os.environ["GTEST_COLOR"] = "no" # disable ANSI symbols + + def configure_build_env(self, env): + if self.platform.is_embedded(): + return + env.Append(CXXFLAGS=["-std=c++11"]) + if not IS_WINDOWS: + env.Append(CCFLAGS=["-pthread"], LINKFLAGS=["-pthread"]) + + def on_testing_line_output(self, line): + if self.options.verbose: + click.echo(line, nl=False) + + test_case = self._tc_parser.parse(line) + if test_case: + click.echo(test_case.humanize()) + self.test_suite.add_case(test_case) + + if "Global test environment tear-down" in line: + self.test_suite.on_finish() diff --git a/platformio/test/runners/readers/__init__.py b/platformio/test/runners/readers/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/platformio/test/runners/readers/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
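The GoogleTest parser above (named ``DoctestTestCaseParser`` inside ``googletest.py`` in this changeset) turns console blocks delimited by ``[ RUN ]`` / ``[ FAILED ]`` markers into ``TestCase`` objects. A quick sketch feeding it canned output; the sample lines follow the format quoted in the source comments above::

    from platformio.test.runners.googletest import DoctestTestCaseParser

    parser = DoctestTestCaseParser()
    sample = [
        "[ RUN      ] FooTest.Bar\n",
        "test/test_gtest/test_main.cpp:26: Failure\n",
        "Expected equality of these values:\n",
        "[  FAILED  ] FooTest.Bar (0 ms)\n",
    ]
    for line in sample:
        test_case = parser.parse(line)
        if test_case:
            # -> name "FooTest.Bar", status FAILED, source test_main.cpp:26
            print(test_case.humanize())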
diff --git a/platformio/test/runners/readers/program.py b/platformio/test/runners/readers/program.py new file mode 100644 index 00000000..b0b33130 --- /dev/null +++ b/platformio/test/runners/readers/program.py @@ -0,0 +1,119 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import asyncio +import os +import signal +import subprocess +import time + +from platformio.compat import IS_WINDOWS, get_filesystem_encoding, get_locale_encoding +from platformio.test.exception import UnitTestError + + +class ProgramProcessProtocol(asyncio.SubprocessProtocol): + def __init__(self, test_runner, exit_future): + self.test_runner = test_runner + self.exit_future = exit_future + + def pipe_data_received(self, _, data): + try: + data = data.decode(get_locale_encoding() or get_filesystem_encoding()) + except UnicodeDecodeError: + data = data.decode("latin-1") + self.test_runner.on_testing_data_output(data) + if self.test_runner.test_suite.is_finished(): + self._stop_testing() + + def process_exited(self): + self._stop_testing() + + def _stop_testing(self): + if not self.exit_future.done(): + self.exit_future.set_result(True) + + +class ProgramTestOutputReader: + + KILLING_TIMEOUT = 5 # seconds + + def __init__(self, test_runner): + self.test_runner = test_runner + self.aio_loop = ( + asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.new_event_loop() + ) + asyncio.set_event_loop(self.aio_loop) + + def get_testing_command(self): + custom_testing_command = self.test_runner.project_config.get( + f"env:{self.test_runner.test_suite.env_name}", "test_testing_command" + ) + if custom_testing_command: + return custom_testing_command + build_dir = self.test_runner.project_config.get("platformio", "build_dir") + cmd = [ + os.path.join( + build_dir, + self.test_runner.test_suite.env_name, + "program.exe" if IS_WINDOWS else "program", + ) + ] + if self.test_runner.options.program_args: + cmd.extend(self.test_runner.options.program_args) + return cmd + + async def gather_results(self): + exit_future = asyncio.Future(loop=self.aio_loop) + transport, _ = await self.aio_loop.subprocess_exec( + lambda: ProgramProcessProtocol(self.test_runner, exit_future), + *self.get_testing_command(), + stdin=None, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + await exit_future + last_return_code = transport.get_returncode() + transport.close() + + # wait until subprocess will be killed + start = time.time() + while ( + start > (time.time() - self.KILLING_TIMEOUT) + and transport.get_returncode() is None + ): + await asyncio.sleep(0.5) + + if last_return_code: + self.raise_for_status(last_return_code) + + @staticmethod + def raise_for_status(return_code): + try: + sig = signal.Signals(abs(return_code)) + try: + signal_description = signal.strsignal(sig) + except AttributeError: + signal_description = "" + raise UnitTestError( + f"Program received signal {sig.name} ({signal_description})" + ) + except ValueError: + raise UnitTestError("Program errored with %d code" % return_code) + + 
def begin(self): + try: + self.aio_loop.run_until_complete(self.gather_results()) + finally: + self.aio_loop.run_until_complete(self.aio_loop.shutdown_asyncgens()) + self.aio_loop.close() diff --git a/platformio/test/runners/readers/serial.py b/platformio/test/runners/readers/serial.py new file mode 100644 index 00000000..195298a0 --- /dev/null +++ b/platformio/test/runners/readers/serial.py @@ -0,0 +1,89 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from time import sleep + +import click +import serial + +from platformio.device.finder import find_serial_port +from platformio.exception import UserSideException + + +class SerialTestOutputReader: + + SERIAL_TIMEOUT = 600 + + def __init__(self, test_runner): + self.test_runner = test_runner + + def begin(self): + click.echo( + "If you don't see any output for the first 10 secs, " + "please reset board (press reset button)" + ) + click.echo() + + try: + ser = serial.serial_for_url( + self.resolve_test_port(), + do_not_open=True, + baudrate=self.test_runner.get_test_speed(), + timeout=self.SERIAL_TIMEOUT, + ) + ser.rts = self.test_runner.options.monitor_rts + ser.dtr = self.test_runner.options.monitor_dtr + ser.open() + except serial.SerialException as e: + click.secho(str(e), fg="red", err=True) + return None + + if not self.test_runner.options.no_reset: + ser.flushInput() + ser.setDTR(False) + ser.setRTS(False) + sleep(0.1) + ser.setDTR(True) + ser.setRTS(True) + sleep(0.1) + + while not self.test_runner.test_suite.is_finished(): + self.test_runner.on_testing_data_output(ser.read(ser.in_waiting or 1)) + ser.close() + + def resolve_test_port(self): + project_options = self.test_runner.project_config.items( + env=self.test_runner.test_suite.env_name, as_dict=True + ) + scan_options = dict( + initial_port=self.test_runner.get_test_port(), + board_config=self.test_runner.platform.board_config( + project_options["board"] + ), + upload_protocol=project_options.get("upload_port"), + ensure_ready=True, + ) + + elapsed = 0 + while elapsed < 5: + port = find_serial_port(**scan_options) + if port: + return port + sleep(0.25) + elapsed += 0.25 + + raise UserSideException( + "Please specify `test_port` for environment or use " + "global `--test-port` option." + ) diff --git a/platformio/test/runners/unity.py b/platformio/test/runners/unity.py new file mode 100644 index 00000000..6e0eae5a --- /dev/null +++ b/platformio/test/runners/unity.py @@ -0,0 +1,296 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
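The Unity runner that follows recognizes result lines with ``TESTCASE_PARSE_RE`` (``file:line:test_name:STATUS[: message]``). A tiny sketch of what that pattern extracts, using the example line quoted in the source comment below::

    from platformio.test.runners.unity import UnityTestRunner

    line = "test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33"
    match = UnityTestRunner.TESTCASE_PARSE_RE.search(line)
    print(match.groupdict())
    # {'source_file': 'test/test_foo.cpp', 'source_line': '44',
    #  'name': 'test_function_foo', 'status': 'FAIL',
    #  'message': 'Expected 32 Was 33'}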
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import re +import string +from pathlib import Path + +import click + +from platformio.test.exception import UnitTestSuiteError +from platformio.test.result import TestCase, TestCaseSource, TestStatus +from platformio.test.runners.base import TestRunnerBase +from platformio.util import strip_ansi_codes + + +class UnityTestRunner(TestRunnerBase): + + EXTRA_LIB_DEPS = ["throwtheswitch/Unity@^2.5.2"] + + # Example: + # test/test_foo.cpp:44:test_function_foo:FAIL: Expected 32 Was 33 + TESTCASE_PARSE_RE = re.compile( + r"(?P[^:]+):(?P\d+):(?P[^:]+):" + r"(?PPASS|IGNORE|FAIL)(:\s*(?P.+)$)?" + ) + + UNITY_CONFIG_H = """ +#ifndef UNITY_CONFIG_H +#define UNITY_CONFIG_H + +#ifndef NULL +#ifndef __cplusplus +#define NULL (void*)0 +#else +#define NULL 0 +#endif +#endif + +#ifdef __cplusplus +extern "C" +{ +#endif + +void unityOutputStart(unsigned long); +void unityOutputChar(unsigned int); +void unityOutputFlush(); +void unityOutputComplete(); + +#define UNITY_OUTPUT_START() unityOutputStart((unsigned long) $baudrate) +#define UNITY_OUTPUT_CHAR(c) unityOutputChar(c) +#define UNITY_OUTPUT_FLUSH() unityOutputFlush() +#define UNITY_OUTPUT_COMPLETE() unityOutputComplete() + +#ifdef __cplusplus +} +#endif /* extern "C" */ + +#endif /* UNITY_CONFIG_H */ + +""" + + UNITY_CONFIG_C = """ +#include + +#if !defined(UNITY_WEAK_ATTRIBUTE) && !defined(UNITY_WEAK_PRAGMA) +# if defined(__GNUC__) || defined(__ghs__) /* __GNUC__ includes clang */ +# if !(defined(__WIN32__) && defined(__clang__)) && !defined(__TMS470__) +# define UNITY_WEAK_ATTRIBUTE __attribute__((weak)) +# endif +# endif +#endif + +#ifdef __cplusplus +extern "C" +{ +#endif + +#ifdef UNITY_WEAK_ATTRIBUTE + UNITY_WEAK_ATTRIBUTE void setUp(void) { } + UNITY_WEAK_ATTRIBUTE void tearDown(void) { } + UNITY_WEAK_ATTRIBUTE void suiteSetUp(void) { } + UNITY_WEAK_ATTRIBUTE int suiteTearDown(int num_failures) { return num_failures; } +#elif defined(UNITY_WEAK_PRAGMA) + #pragma weak setUp + void setUp(void) { } + #pragma weak tearDown + void tearDown(void) { } + #pragma weak suiteSetUp + void suiteSetUp(void) { } + #pragma weak suiteTearDown + int suiteTearDown(int num_failures) { return num_failures; } +#endif + +#ifdef __cplusplus +} +#endif /* extern "C" */ + +$framework_config_code + """ + + UNITY_FRAMEWORK_CONFIG = dict( + native=dict( + code=""" +#include +void unityOutputStart(unsigned long baudrate) { } +void unityOutputChar(unsigned int c) { putchar(c); } +void unityOutputFlush(void) { fflush(stdout); } +void unityOutputComplete(void) { } + """, + language="c", + ), + arduino=dict( + code=""" +#include +void unityOutputStart(unsigned long baudrate) { Serial.begin(baudrate); } +void unityOutputChar(unsigned int c) { Serial.write(c); } +void unityOutputFlush(void) { Serial.flush(); } +void unityOutputComplete(void) { Serial.end(); } + """, + language="cpp", + ), + mbed=dict( + code=""" +#include +#if MBED_MAJOR_VERSION == 6 +UnbufferedSerial pc(USBTX, USBRX); +#else +RawSerial pc(USBTX, USBRX); +#endif +void unityOutputStart(unsigned long baudrate) { pc.baud(baudrate); } +void unityOutputChar(unsigned int c) { +#if MBED_MAJOR_VERSION == 6 + pc.write(&c, 1); +#else + pc.putc(c); +#endif +} +void unityOutputFlush(void) { } +void unityOutputComplete(void) { } + """, + language="cpp", + ), + espidf=dict( + code=""" +#include +void unityOutputStart(unsigned long baudrate) { } +void unityOutputChar(unsigned int c) { putchar(c); } +void 
unityOutputFlush(void) { fflush(stdout); } +void unityOutputComplete(void) { } + """, + language="c", + ), + zephyr=dict( + code=""" +#include +void unityOutputStart(unsigned long baudrate) { } +void unityOutputChar(unsigned int c) { printk("%c", c); } +void unityOutputFlush(void) { } +void unityOutputComplete(void) { } + """, + language="c", + ), + legacy_custom_transport=dict( + code=""" +#include +void unityOutputStart(unsigned long baudrate) { unittest_uart_begin(); } +void unityOutputChar(unsigned int c) { unittest_uart_putchar(c); } +void unityOutputFlush(void) { unittest_uart_flush(); } +void unityOutputComplete(void) { unittest_uart_end(); } + """, + language="cpp", + ), + ) + + def get_unity_framework_config(self): + if not self.platform.is_embedded(): + return self.UNITY_FRAMEWORK_CONFIG["native"] + if ( + self.project_config.get( + f"env:{self.test_suite.env_name}", "test_transport", None + ) + == "custom" + ): + framework = "legacy_custom_transport" + else: + framework = ( + self.project_config.get(f"env:{self.test_suite.env_name}", "framework") + or [None] + )[0] + if framework and framework in self.UNITY_FRAMEWORK_CONFIG: + return self.UNITY_FRAMEWORK_CONFIG[framework] + raise UnitTestSuiteError( + f"Could not find Unity configuration for the `{framework}` framework.\n" + "Learn how to create a custom Unity configuration at" + "https://docs.platformio.org/en/latest/advanced/" + "unit-testing/frameworks/unity.html" + ) + + def configure_build_env(self, env): + env.Append(CPPDEFINES=["UNITY_INCLUDE_CONFIG_H"]) + if self.custom_unity_config_exists(): + return env + env.Replace( + UNITY_CONFIG_DIR=os.path.join("$BUILD_DIR", "unity_config"), + BUILD_UNITY_CONFIG_DIR=os.path.join("$BUILD_DIR", "unity_config_build"), + ) + env.Prepend(CPPPATH=["$UNITY_CONFIG_DIR"]) + self.generate_unity_extras(env.subst("$UNITY_CONFIG_DIR")) + env.BuildSources("$BUILD_UNITY_CONFIG_DIR", "$UNITY_CONFIG_DIR") + return env + + def custom_unity_config_exists(self): + test_dir = self.project_config.get("platformio", "test_dir") + config_fname = "unity_config.h" + if os.path.isfile(os.path.join(test_dir, config_fname)): + return True + test_name = ( + self.test_suite.test_name if self.test_suite.test_name != "*" else None + ) + while test_name: + if os.path.isfile(os.path.join(test_dir, test_name, config_fname)): + return True + test_name = os.path.dirname(test_name) # parent dir + return False + + def generate_unity_extras(self, dst_dir): + dst_dir = Path(dst_dir) + dst_dir.mkdir(parents=True, exist_ok=True) + unity_h = dst_dir / "unity_config.h" + if not unity_h.is_file(): + unity_h.write_text( + string.Template(self.UNITY_CONFIG_H).substitute( + baudrate=self.get_test_speed() + ), + encoding="utf8", + ) + framework_config = self.get_unity_framework_config() + unity_c = dst_dir / ("unity_config.%s" % framework_config.get("language", "c")) + if not unity_c.is_file(): + unity_c.write_text( + string.Template(self.UNITY_CONFIG_C).substitute( + framework_config_code=framework_config["code"] + ), + encoding="utf8", + ) + + def on_testing_line_output(self, line): + if self.options.verbose: + click.echo(line, nl=False) + line = strip_ansi_codes(line or "").strip() + if not line: + return + + test_case = self.parse_test_case(line) + if test_case: + click.echo(test_case.humanize()) + + if all(s in line for s in ("Tests", "Failures", "Ignored")): + self.test_suite.on_finish() + + def parse_test_case(self, line): + if not self.TESTCASE_PARSE_RE: + raise NotImplementedError() + line = line.strip() + if not line: + 
return None + match = self.TESTCASE_PARSE_RE.search(line) + if not match: + return None + data = match.groupdict() + source = None + if "source_file" in data: + source = TestCaseSource( + filename=data["source_file"], line=int(data.get("source_line")) + ) + test_case = TestCase( + name=data.get("name").strip(), + status=TestStatus.from_string(data.get("status")), + message=(data.get("message") or "").strip() or None, + stdout=line, + source=source, + ) + self.test_suite.add_case(test_case) + return test_case diff --git a/platformio/util.py b/platformio/util.py index 90bce628..00158d45 100644 --- a/platformio/util.py +++ b/platformio/util.py @@ -14,23 +14,24 @@ from __future__ import absolute_import -import json +import functools import math -import os import platform import re import shutil import time -from functools import wraps -from glob import glob +from datetime import datetime import click -import zeroconf -from platformio import __version__, exception, proc -from platformio.compat import IS_MACOS, IS_WINDOWS -from platformio.fs import cd, load_json # pylint: disable=unused-import -from platformio.proc import exec_command # pylint: disable=unused-import +from platformio import __version__ + +# pylint: disable=unused-import +from platformio.device.list import list_serial_ports as get_serial_ports +from platformio.fs import cd, load_json +from platformio.proc import exec_command + +# pylint: enable=unused-import class memoized(object): @@ -44,7 +45,7 @@ class memoized(object): self.cache = {} def __call__(self, func): - @wraps(func) + @functools.wraps(func) def wrapper(*args, **kwargs): key = str(args) + str(kwargs) if key not in self.cache or ( @@ -66,7 +67,7 @@ class throttle(object): self.last = 0 def __call__(self, func): - @wraps(func) + @functools.wraps(func) def wrapper(*args, **kwargs): diff = int(round((time.time() - self.last) * 1000)) if diff < self.threshhold: @@ -97,140 +98,6 @@ def get_systype(): return "%s_%s" % (type_, arch) if arch else type_ -def get_serial_ports(filter_hwid=False): - try: - # pylint: disable=import-outside-toplevel - from serial.tools.list_ports import comports - except ImportError: - raise exception.GetSerialPortsError(os.name) - - result = [] - for p, d, h in comports(): - if not p: - continue - if not filter_hwid or "VID:PID" in h: - result.append({"port": p, "description": d, "hwid": h}) - - if filter_hwid: - return result - - # fix for PySerial - if not result and IS_MACOS: - for p in glob("/dev/tty.*"): - result.append({"port": p, "description": "n/a", "hwid": "n/a"}) - return result - - -# Backward compatibility for PIO Core <3.5 -get_serialports = get_serial_ports - - -def get_logical_devices(): - items = [] - if IS_WINDOWS: - try: - result = proc.exec_command( - ["wmic", "logicaldisk", "get", "name,VolumeName"] - ).get("out", "") - devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?") - for line in result.split("\n"): - match = devicenamere.match(line.strip()) - if not match: - continue - items.append({"path": match.group(1) + "\\", "name": match.group(2)}) - return items - except WindowsError: # pylint: disable=undefined-variable - pass - # try "fsutil" - result = proc.exec_command(["fsutil", "fsinfo", "drives"]).get("out", "") - for device in re.findall(r"[A-Z]:\\", result): - items.append({"path": device, "name": None}) - return items - - result = proc.exec_command(["df"]).get("out") - devicenamere = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I) - for line in result.split("\n"): - match = devicenamere.match(line.strip()) - if not 
match: - continue - items.append({"path": match.group(1), "name": os.path.basename(match.group(1))}) - return items - - -def get_mdns_services(): - class mDNSListener(object): - def __init__(self): - self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All) - self._found_types = [] - self._found_services = [] - - def __enter__(self): - zeroconf.ServiceBrowser( - self._zc, - [ - "_http._tcp.local.", - "_hap._tcp.local.", - "_services._dns-sd._udp.local.", - ], - self, - ) - return self - - def __exit__(self, etype, value, traceback): - self._zc.close() - - def add_service(self, zc, type_, name): - try: - assert zeroconf.service_type_name(name) - assert str(name) - except (AssertionError, UnicodeError, zeroconf.BadTypeInNameException): - return - if name not in self._found_types: - self._found_types.append(name) - zeroconf.ServiceBrowser(self._zc, name, self) - if type_ in self._found_types: - s = zc.get_service_info(type_, name) - if s: - self._found_services.append(s) - - def remove_service(self, zc, type_, name): - pass - - def update_service(self, zc, type_, name): - pass - - def get_services(self): - return self._found_services - - items = [] - with mDNSListener() as mdns: - time.sleep(3) - for service in mdns.get_services(): - properties = None - if service.properties: - try: - properties = { - k.decode("utf8"): v.decode("utf8") - if isinstance(v, bytes) - else v - for k, v in service.properties.items() - } - json.dumps(properties) - except UnicodeDecodeError: - properties = None - - items.append( - { - "type": service.type, - "name": service.name, - "ip": ", ".join(service.parsed_addresses()), - "port": service.port, - "properties": properties, - } - ) - return items - - def pioversion_to_intstr(): """Legacy for framework-zephyr/scripts/platformio/platformio-build-pre.py""" vermatch = re.match(r"^([\d\.]+)", __version__) @@ -252,10 +119,10 @@ def items_in_list(needle, haystack): return set(needle) & set(haystack) -def parse_date(datestr): +def parse_datetime(datestr): if "T" in datestr and "Z" in datestr: - return time.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") - return time.strptime(datestr) + return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") + return datetime.strptime(datestr) def merge_dicts(d1, d2, path=None): @@ -269,10 +136,10 @@ def merge_dicts(d1, d2, path=None): return d1 -def print_labeled_bar(label, is_error=False, fg=None): +def print_labeled_bar(label, is_error=False, fg=None, sep="="): terminal_width, _ = shutil.get_terminal_size() width = len(click.unstyle(label)) - half_line = "=" * int((terminal_width - width - 2) / 2) + half_line = sep * int((terminal_width - width - 2) / 2) click.secho("%s %s %s" % (half_line, label, half_line), fg=fg, err=is_error) @@ -286,3 +153,7 @@ def humanize_duration_time(duration): tokens.append(int(round(duration) if multiplier == 1 else fraction)) duration -= fraction * multiplier return "{:02d}:{:02d}:{:02d}.{:03d}".format(*tokens) + + +def strip_ansi_codes(text): + return re.sub(r"\x1B\[\d+(;\d+){0,2}m", "", text) diff --git a/scripts/docspregen.py b/scripts/docspregen.py index d05cef91..90dc887d 100644 --- a/scripts/docspregen.py +++ b/scripts/docspregen.py @@ -13,10 +13,10 @@ # limitations under the License. 
import functools -import json import os import sys import tempfile +from urllib.parse import ParseResult, urlparse, urlunparse sys.path.append("..") @@ -26,10 +26,6 @@ from platformio import fs, util # noqa: E402 from platformio.package.manager.platform import PlatformPackageManager # noqa: E402 from platformio.platform.factory import PlatformFactory # noqa: E402 -try: - from urlparse import ParseResult, urlparse, urlunparse -except ImportError: - from urllib.parse import ParseResult, urlparse, urlunparse RST_COPYRIGHT = """.. Copyright (c) 2014-present PlatformIO Licensed under the Apache License, Version 2.0 (the "License"); @@ -75,7 +71,7 @@ def install_platforms(): page = 1 pm = PlatformPackageManager() while True: - result = REGCLIENT.list_packages(filters=dict(types=["platform"]), page=page) + result = REGCLIENT.list_packages(qualifiers=dict(types=["platform"]), page=page) for item in result["items"]: spec = "%s/%s" % (item["owner"]["username"], item["name"]) skip_conds = [ @@ -85,7 +81,7 @@ def install_platforms(): if all(skip_conds): click.secho("Skip community platform: %s" % spec, fg="yellow") continue - pm.install(spec, skip_default_package=True) + pm.install(spec, skip_dependencies=True) page += 1 if not result["items"] or result["page"] * result["limit"] >= result["total"]: break diff --git a/scripts/install_devplatforms.py b/scripts/install_devplatforms.py index af579e17..80526d90 100644 --- a/scripts/install_devplatforms.py +++ b/scripts/install_devplatforms.py @@ -33,9 +33,7 @@ import click ) def main(desktop, ignore, ownernames): platforms = json.loads( - subprocess.check_output( - ["platformio", "platform", "search", "--json-output"] - ).decode() + subprocess.check_output(["pio", "platform", "search", "--json-output"]).decode() ) ignore = [n.strip() for n in (ignore or "").split(",") if n.strip()] ownernames = [n.strip() for n in (ownernames or "").split(",") if n.strip()] @@ -47,7 +45,16 @@ def main(desktop, ignore, ownernames): ] if any(skip): continue - subprocess.check_call(["platformio", "platform", "install", platform["name"]]) + subprocess.check_call( + [ + "pio", + "pkg", + "install", + "--global", + "--platform", + "{ownername}/{name}".format(**platform), + ] + ) if __name__ == "__main__": diff --git a/setup.py b/setup.py index 02b947e0..20585ab3 100644 --- a/setup.py +++ b/setup.py @@ -24,30 +24,27 @@ from platformio import ( __url__, __version__, ) -from platformio.compat import PY2 minimal_requirements = [ "bottle==0.12.*", - "click>=8.0.3,<9", + "click%s" % (">=8.0.3,<9" if sys.version_info >= (3, 7) else "==8.0.4"), "colorama", - "marshmallow%s" % (">=2,<3" if PY2 else ">=2,<4"), + "marshmallow==3.*", "pyelftools>=0.27,<1", "pyserial==3.*", "requests==2.*", "semantic_version==2.9.*", "tabulate==0.8.*", + "zeroconf<1", ] -if not PY2: - minimal_requirements.append("zeroconf==0.38.*") - home_requirements = [ "aiofiles==0.8.*", "ajsonrpc==1.*", - "starlette==0.18.*", + "starlette==%s" % ("0.20.*" if sys.version_info >= (3, 7) else "0.19.1"), "uvicorn==%s" % ("0.17.*" if sys.version_info >= (3, 7) else "0.16.0"), - "wsproto==1.0.*", + "wsproto==%s" % ("1.1.*" if sys.version_info >= (3, 7) else "1.0.0"), ] setup( @@ -59,7 +56,8 @@ setup( author_email=__email__, url=__url__, license=__license__, - install_requires=minimal_requirements + ([] if PY2 else home_requirements), + install_requires=minimal_requirements + home_requirements, + python_requires=">=3.6", packages=find_packages(exclude=["tests.*", "tests"]) + ["scripts"], package_data={ "platformio": [ diff 
--git a/tests/commands/pkg/__init__.py b/tests/commands/pkg/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/tests/commands/pkg/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/commands/pkg/test_exec.py b/tests/commands/pkg/test_exec.py new file mode 100644 index 00000000..0b0c0f44 --- /dev/null +++ b/tests/commands/pkg/test_exec.py @@ -0,0 +1,59 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint: disable=unused-argument + +import pytest + +from platformio.package.commands.exec import package_exec_cmd +from platformio.util import strip_ansi_codes + + +def test_pkg_not_installed(clirunner, validate_cliresult, isolated_pio_core): + result = clirunner.invoke( + package_exec_cmd, + ["--", "openocd"], + ) + with pytest.raises( + AssertionError, + match=("Could not find a package with 'openocd' executable file"), + ): + validate_cliresult(result) + + +def test_pkg_specified(clirunner, validate_cliresult, isolated_pio_core): + # with install + result = clirunner.invoke( + package_exec_cmd, + ["-p", "platformio/tool-openocd", "--", "openocd", "--version"], + obj=dict(force_click_stream=True), + ) + validate_cliresult(result) + output = strip_ansi_codes(result.output) + assert "Tool Manager: Installing platformio/tool-openocd" in output + assert "Open On-Chip Debugger" in output + + +def test_unrecognized_options(clirunner, validate_cliresult, isolated_pio_core): + # unrecognized option + result = clirunner.invoke( + package_exec_cmd, + ["--", "openocd", "--test-unrecognized"], + obj=dict(force_click_stream=True), + ) + with pytest.raises( + AssertionError, + match=(r"openocd: (unrecognized|unknown) option"), + ): + validate_cliresult(result) diff --git a/tests/commands/pkg/test_install.py b/tests/commands/pkg/test_install.py new file mode 100644 index 00000000..d2aa25d6 --- /dev/null +++ b/tests/commands/pkg/test_install.py @@ -0,0 +1,542 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint: disable=unused-argument + +import os + +import pytest + +from platformio import fs +from platformio.package.commands.install import package_install_cmd +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.manager.tool import ToolPackageManager +from platformio.package.meta import PackageSpec +from platformio.project.config import ProjectConfig + +PROJECT_CONFIG_TPL = """ +[env] +platform = platformio/atmelavr@^3.4.0 +lib_deps = milesburton/DallasTemperature@^3.9.1 + +[env:baremetal] +board = uno + +[env:devkit] +framework = arduino +board = attiny88 +""" + + +def pkgs_to_specs(pkgs): + return [ + PackageSpec(name=pkg.metadata.name, requirements=pkg.metadata.version) + for pkg in pkgs + ] + + +def test_global_packages( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + # libraries + result = clirunner.invoke( + package_install_cmd, + [ + "--global", + "-l", + "https://github.com/milesburton/Arduino-Temperature-Control-Library.git#3.9.0", + "--skip-dependencies", + ], + ) + validate_cliresult(result) + assert pkgs_to_specs(LibraryPackageManager().get_installed()) == [ + PackageSpec("DallasTemperature@3.9.0+sha.964939d") + ] + # with dependencies + result = clirunner.invoke( + package_install_cmd, + [ + "--global", + "-l", + "https://github.com/milesburton/Arduino-Temperature-Control-Library.git#3.9.0", + "-l", + "bblanchon/ArduinoJson@^5", + ], + ) + validate_cliresult(result) + assert pkgs_to_specs(LibraryPackageManager().get_installed()) == [ + PackageSpec("ArduinoJson@5.13.4"), + PackageSpec("DallasTemperature@3.9.0+sha.964939d"), + PackageSpec("OneWire@2.3.6"), + ] + # custom storage + storage_dir = tmp_path / "custom_lib_storage" + storage_dir.mkdir() + result = clirunner.invoke( + package_install_cmd, + [ + "--global", + "--storage-dir", + str(storage_dir), + "-l", + "bblanchon/ArduinoJson@^5", + ], + ) + validate_cliresult(result) + assert pkgs_to_specs(LibraryPackageManager(storage_dir).get_installed()) == [ + PackageSpec("ArduinoJson@5.13.4") + ] + + # tools + result = clirunner.invoke( + package_install_cmd, + ["--global", "-t", "platformio/framework-arduino-avr-attiny@^1.5.2"], + ) + validate_cliresult(result) + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("framework-arduino-avr-attiny@1.5.2") + ] + + # platforms + result = clirunner.invoke( + package_install_cmd, + ["--global", "-p", "platformio/atmelavr@^3.4.0", "--skip-dependencies"], + ) + validate_cliresult(result) + assert pkgs_to_specs(PlatformPackageManager().get_installed()) == [ + PackageSpec("atmelavr@3.4.0") + ] + + +def test_skip_dependencies(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "--skip-dependencies"], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + installed_lib_pkgs = LibraryPackageManager( + os.path.join(ProjectConfig().get("platformio", "libdeps_dir"), "devkit") + ).get_installed() + assert pkgs_to_specs(installed_lib_pkgs) == [ + PackageSpec("DallasTemperature@3.9.1") + ] + assert len(ToolPackageManager().get_installed()) == 0 + + +def test_baremetal_project(clirunner, 
validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "baremetal"], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + installed_lib_pkgs = LibraryPackageManager( + os.path.join(ProjectConfig().get("platformio", "libdeps_dir"), "baremetal") + ).get_installed() + assert pkgs_to_specs(installed_lib_pkgs) == [ + PackageSpec("DallasTemperature@3.9.1"), + PackageSpec("OneWire@2.3.6"), + ] + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("toolchain-atmelavr@1.70300.191015"), + ] + + +def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_specs(lm.get_installed()) == [ + PackageSpec("DallasTemperature@3.9.1"), + PackageSpec("OneWire@2.3.6"), + ] + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("framework-arduino-avr-attiny@1.5.2"), + PackageSpec("toolchain-atmelavr@1.70300.191015"), + ] + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.9.1" + ] + + # test "Already up-to-date" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + assert "Already up-to-date" in result.output + + +def test_private_lib_deps(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + private_lib_dir = project_dir / "lib" / "private" + private_lib_dir.mkdir(parents=True) + (private_lib_dir / "library.json").write_text( + """ +{ + "name": "My Private Lib", + "version": "1.0.0", + "dependencies": { + "bblanchon/ArduinoJson": "^5", + "milesburton/DallasTemperature": "^3.9.1" + } +} +""" + ) + (project_dir / "platformio.ini").write_text( + """ +[env:private] +platform = native +""" + ) + with fs.cd(str(project_dir)): + config = ProjectConfig() + + # some deps were added by user manually + result = clirunner.invoke( + package_install_cmd, + [ + "-g", + "--storage-dir", + config.get("platformio", "lib_dir"), + "-l", + "paulstoffregen/OneWire@^2.3.5", + ], + ) + validate_cliresult(result) + + # ensure all deps are installed + result = clirunner.invoke(package_install_cmd) + validate_cliresult(result) + installed_private_pkgs = LibraryPackageManager( + config.get("platformio", "lib_dir") + ).get_installed() + assert pkgs_to_specs(installed_private_pkgs) == [ + PackageSpec("OneWire@2.3.6"), + PackageSpec("My Private Lib@1.0.0"), + ] + installed_env_pkgs = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "private") + ).get_installed() + assert pkgs_to_specs(installed_env_pkgs) == [ + PackageSpec("ArduinoJson@5.13.4"), + PackageSpec("DallasTemperature@3.9.1"), + ] + + +def test_remove_project_unused_libdeps( + clirunner, validate_cliresult, isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + result = clirunner.invoke( + package_install_cmd, + ["-d", 
str(project_dir), "-e", "baremetal"],
+    )
+    validate_cliresult(result)
+    with fs.cd(str(project_dir)):
+        config = ProjectConfig()
+        storage_dir = os.path.join(config.get("platformio", "libdeps_dir"), "baremetal")
+        lm = LibraryPackageManager(storage_dir)
+        assert pkgs_to_specs(lm.get_installed()) == [
+            PackageSpec("DallasTemperature@3.9.1"),
+            PackageSpec("OneWire@2.3.6"),
+        ]
+
+        # add new deps
+        lib_deps = config.get("env:baremetal", "lib_deps")
+        config.set("env:baremetal", "lib_deps", lib_deps + ["bblanchon/ArduinoJson@^5"])
+        config.save()
+        result = clirunner.invoke(
+            package_install_cmd,
+            ["-e", "baremetal"],
+        )
+        validate_cliresult(result)
+        lm = LibraryPackageManager(storage_dir)
+        assert pkgs_to_specs(lm.get_installed()) == [
+            PackageSpec("ArduinoJson@5.13.4"),
+            PackageSpec("DallasTemperature@3.9.1"),
+            PackageSpec("OneWire@2.3.6"),
+        ]
+
+        # manually remove from configuration file
+        config.set("env:baremetal", "lib_deps", ["bblanchon/ArduinoJson@^5"])
+        config.save()
+        result = clirunner.invoke(
+            package_install_cmd,
+            ["-e", "baremetal"],
+        )
+        validate_cliresult(result)
+        lm = LibraryPackageManager(storage_dir)
+        assert pkgs_to_specs(lm.get_installed()) == [PackageSpec("ArduinoJson@5.13.4")]
+
+
+def test_unknown_project_dependencies(
+    clirunner, validate_cliresult, isolated_pio_core, tmp_path
+):
+    project_dir = tmp_path / "project"
+    project_dir.mkdir()
+    (project_dir / "platformio.ini").write_text(
+        """
+[env:unknown_platform]
+platform = unknown_platform
+
+[env:unknown_lib_deps]
+lib_deps = SPI, platformio/unknown_library
+"""
+    )
+    with fs.cd(str(project_dir)):
+        result = clirunner.invoke(
+            package_install_cmd,
+            ["-e", "unknown_platform"],
+        )
+        with pytest.raises(
+            AssertionError,
+            match=("Could not find the package with 'unknown_platform' requirements"),
+        ):
+            validate_cliresult(result)
+
+        # unknown libraries
+        result = clirunner.invoke(
+            package_install_cmd,
+            ["-e", "unknown_lib_deps"],
+        )
+        with pytest.raises(
+            AssertionError,
+            match=(
+                "Could not find the package with 'platformio/unknown_library' requirements"
+            ),
+        ):
+            validate_cliresult(result)
+
+
+def test_custom_project_libraries(
+    clirunner, validate_cliresult, func_isolated_pio_core, tmp_path
+):
+    project_dir = tmp_path / "project"
+    project_dir.mkdir()
+    (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL)
+    spec = "bblanchon/ArduinoJson@^5"
+    result = clirunner.invoke(
+        package_install_cmd,
+        ["-d", str(project_dir), "-e", "devkit", "-l", spec],
+    )
+    validate_cliresult(result)
+    with fs.cd(str(project_dir)):
+        # try again
+        result = clirunner.invoke(
+            
package_install_cmd, + ["-e", "devkit", "-l", "nanopb/Nanopb@^0.4.6", "--no-save"], + ) + validate_cliresult(result) + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_specs(lm.get_installed()) == [ + PackageSpec("ArduinoJson@5.13.4"), + PackageSpec("Nanopb@0.4.6+3"), + ] + assert config.get("env:devkit", "lib_deps") == [ + "bblanchon/ArduinoJson@^5", + ] + + # unknown libraries + result = clirunner.invoke( + package_install_cmd, ["-l", "platformio/unknown_library"] + ) + with pytest.raises( + AssertionError, + match=( + "Could not find the package with " + "'platformio/unknown_library' requirements" + ), + ): + validate_cliresult(result) + + +def test_custom_project_tools( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + spec = "platformio/tool-openocd" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-t", spec], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + # try again + result = clirunner.invoke( + package_install_cmd, + ["-e", "devkit", "-t", spec], + ) + validate_cliresult(result) + assert "already installed" in result.output + # try again in the silent mode + result = clirunner.invoke( + package_install_cmd, + ["-e", "devkit", "-t", spec, "--silent"], + ) + validate_cliresult(result) + assert not result.output.strip() + + config = ProjectConfig() + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("tool-openocd@2.1100.211028") + ] + assert not LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ).get_installed() + # do not expect any platforms + assert not os.path.exists(config.get("platformio", "platforms_dir")) + + # check saved deps + assert config.get("env:devkit", "platform_packages") == [ + "platformio/tool-openocd@^2.1100.211028", + ] + + # install tool without saving to config + result = clirunner.invoke( + package_install_cmd, + ["-e", "devkit", "-t", "platformio/tool-esptoolpy@1.20310.0", "--no-save"], + ) + validate_cliresult(result) + config = ProjectConfig() + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("tool-esptoolpy@1.20310.0"), + PackageSpec("tool-openocd@2.1100.211028"), + ] + assert config.get("env:devkit", "platform_packages") == [ + "platformio/tool-openocd@^2.1100.211028", + ] + + # unknown tool + result = clirunner.invoke( + package_install_cmd, ["-t", "platformio/unknown_tool"] + ) + with pytest.raises( + AssertionError, + match=( + "Could not find the package with " + "'platformio/unknown_tool' requirements" + ), + ): + validate_cliresult(result) + + +def test_custom_project_platforms( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + spec = "atmelavr@^3.4.0" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-p", spec, "--skip-dependencies"], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + # try again + result = clirunner.invoke( + package_install_cmd, + ["-e", "devkit", "-p", spec, "--skip-dependencies"], + ) + validate_cliresult(result) + assert "already installed" in result.output + # try again in the silent mode + result = clirunner.invoke( + 
package_install_cmd, + ["-e", "devkit", "-p", spec, "--silent", "--skip-dependencies"], + ) + validate_cliresult(result) + assert not result.output.strip() + + config = ProjectConfig() + assert pkgs_to_specs(PlatformPackageManager().get_installed()) == [ + PackageSpec("atmelavr@3.4.0") + ] + assert not LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ).get_installed() + # do not expect any packages + assert not os.path.exists(config.get("platformio", "packages_dir")) + + # unknown platform + result = clirunner.invoke(package_install_cmd, ["-p", "unknown_platform"]) + with pytest.raises( + AssertionError, + match="Could not find the package with 'unknown_platform' requirements", + ): + validate_cliresult(result) + + # incompatible board + result = clirunner.invoke(package_install_cmd, ["-e", "devkit", "-p", "sifive"]) + with pytest.raises( + AssertionError, + match="Unknown board ID", + ): + validate_cliresult(result) diff --git a/tests/commands/pkg/test_list.py b/tests/commands/pkg/test_list.py new file mode 100644 index 00000000..c917879e --- /dev/null +++ b/tests/commands/pkg/test_list.py @@ -0,0 +1,137 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint: disable=unused-argument + +from platformio.package.commands.install import package_install_cmd +from platformio.package.commands.list import package_list_cmd + +PROJECT_CONFIG_TPL = """ +[env] +platform = platformio/atmelavr@^3.4.0 + +[env:baremetal] +board = uno + +[env:devkit] +framework = arduino +board = attiny88 +lib_deps = + milesburton/DallasTemperature@^3.9.1 + https://github.com/bblanchon/ArduinoJson.git#v6.19.0 +""" + + +def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + + # test all envs + result = clirunner.invoke( + package_list_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + assert all(token in result.output for token in ("baremetal", "devkit")) + assert result.output.count("Platform atmelavr @ 3.4.0") == 2 + assert ( + result.output.count( + "toolchain-atmelavr @ 1.70300.191015 (required: " + "platformio/toolchain-atmelavr @ ~1.70300.0)" + ) + == 2 + ) + assert result.output.count("Libraries") == 1 + assert ( + "ArduinoJson @ 6.19.0+sha.9693fd2 (required: " + "git+https://github.com/bblanchon/ArduinoJson.git#v6.19.0)" + ) in result.output + assert "OneWire @ 2" in result.output + + # test "baremetal" + result = clirunner.invoke( + package_list_cmd, + ["-d", str(project_dir), "-e", "baremetal"], + ) + validate_cliresult(result) + assert "Platform atmelavr @ 3" in result.output + assert "Libraries" not in result.output + + # filter by "tool" package + result = clirunner.invoke( + package_list_cmd, + ["-d", str(project_dir), "-t", 
"toolchain-atmelavr@~1.70300.0"], + ) + assert "framework-arduino" not in result.output + assert "Libraries" not in result.output + + # list only libraries + result = clirunner.invoke( + package_list_cmd, + ["-d", str(project_dir), "--only-libraries"], + ) + assert "Platform atmelavr" not in result.output + + # list only libraries for baremetal + result = clirunner.invoke( + package_list_cmd, + ["-d", str(project_dir), "-e", "baremetal", "--only-libraries"], + ) + assert "No packages" in result.output + + +def test_global_packages(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + result = clirunner.invoke(package_list_cmd, ["-g"]) + validate_cliresult(result) + assert "atmelavr @ 3" in result.output + assert "framework-arduino-avr-attiny" in result.output + + # only tools + result = clirunner.invoke(package_list_cmd, ["-g", "--only-tools"]) + validate_cliresult(result) + assert "toolchain-atmelavr" in result.output + assert "Platforms" not in result.output + + # find tool package + result = clirunner.invoke(package_list_cmd, ["-g", "-t", "toolchain-atmelavr"]) + validate_cliresult(result) + assert "toolchain-atmelavr" in result.output + assert "framework-arduino-avr-attiny@" not in result.output + + # only libraries - no packages + result = clirunner.invoke(package_list_cmd, ["-g", "--only-libraries"]) + validate_cliresult(result) + assert not result.output.strip() + + # check global libs + result = clirunner.invoke( + package_install_cmd, ["-g", "-l", "milesburton/DallasTemperature@^3.9.1"] + ) + validate_cliresult(result) + result = clirunner.invoke(package_list_cmd, ["-g", "--only-libraries"]) + validate_cliresult(result) + assert "DallasTemperature" in result.output + assert "OneWire" in result.output + + # filter by lib + result = clirunner.invoke(package_list_cmd, ["-g", "-l", "OneWire"]) + validate_cliresult(result) + assert "DallasTemperature" in result.output + assert "OneWire" in result.output diff --git a/tests/commands/pkg/test_outdated.py b/tests/commands/pkg/test_outdated.py new file mode 100644 index 00000000..da1abda8 --- /dev/null +++ b/tests/commands/pkg/test_outdated.py @@ -0,0 +1,62 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# pylint: disable=unused-argument + +import re + +from platformio.package.commands.install import package_install_cmd +from platformio.package.commands.outdated import package_outdated_cmd + +PROJECT_OUTDATED_CONFIG_TPL = """ +[env:devkit] +platform = platformio/atmelavr@^2 +framework = arduino +board = attiny88 +lib_deps = milesburton/DallasTemperature@~3.8.0 +""" + +PROJECT_UPDATED_CONFIG_TPL = """ +[env:devkit] +platform = platformio/atmelavr@<4 +framework = arduino +board = attiny88 +lib_deps = milesburton/DallasTemperature@^3.8.0 +""" + + +def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_OUTDATED_CONFIG_TPL) + result = clirunner.invoke(package_install_cmd, ["-d", str(project_dir)]) + validate_cliresult(result) + + # overwrite config + (project_dir / "platformio.ini").write_text(PROJECT_UPDATED_CONFIG_TPL) + result = clirunner.invoke(package_outdated_cmd, ["-d", str(project_dir)]) + validate_cliresult(result) + + # validate output + assert "Checking" in result.output + assert re.search( + r"^atmelavr\s+2\.2\.0\s+3\.\d+\.\d+\s+3\.\d+\.\d+\s+Platform\s+devkit", + result.output, + re.MULTILINE, + ) + assert re.search( + r"^DallasTemperature\s+3\.8\.1\s+3\.\d+\.\d+\s+3\.\d+\.\d+\s+Library\s+devkit", + result.output, + re.MULTILINE, + ) diff --git a/tests/commands/pkg/test_search.py b/tests/commands/pkg/test_search.py new file mode 100644 index 00000000..284f4363 --- /dev/null +++ b/tests/commands/pkg/test_search.py @@ -0,0 +1,58 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from platformio.package.commands.search import package_search_cmd + + +def test_empty_query(clirunner, validate_cliresult): + result = clirunner.invoke( + package_search_cmd, + [""], + ) + validate_cliresult(result) + assert all(t in result.output for t in ("Found", "Official", "page 1 of")) + + +def test_pagination(clirunner, validate_cliresult): + result = clirunner.invoke( + package_search_cmd, + ["type:tool"], + ) + validate_cliresult(result) + assert all(t in result.output for t in ("Verified Tool", "page 1 of")) + + result = clirunner.invoke( + package_search_cmd, + ["type:tool", "-p", "10"], + ) + validate_cliresult(result) + assert all(t in result.output for t in ("Tool", "page 10 of")) + + +def test_sorting(clirunner, validate_cliresult): + result = clirunner.invoke( + package_search_cmd, + ["OneWire", "-s", "popularity"], + ) + validate_cliresult(result) + assert "paulstoffregen/OneWire" in result.output + + +def test_not_found(clirunner, validate_cliresult): + result = clirunner.invoke( + package_search_cmd, + ["name:unknown-package"], + ) + validate_cliresult(result) + assert "Nothing has been found" in result.output diff --git a/tests/commands/pkg/test_show.py b/tests/commands/pkg/test_show.py new file mode 100644 index 00000000..979edbb3 --- /dev/null +++ b/tests/commands/pkg/test_show.py @@ -0,0 +1,103 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from platformio.exception import UserSideException +from platformio.package.commands.show import package_show_cmd + + +def test_spec_name(clirunner, validate_cliresult): + # library + result = clirunner.invoke( + package_show_cmd, + ["ArduinoJSON"], + ) + validate_cliresult(result) + assert "bblanchon/ArduinoJson" in result.output + assert "Library" in result.output + + # platform + result = clirunner.invoke( + package_show_cmd, + ["espressif32"], + ) + validate_cliresult(result) + assert "platformio/espressif32" in result.output + assert "Platform" in result.output + + # tool + result = clirunner.invoke( + package_show_cmd, + ["tool-jlink"], + ) + validate_cliresult(result) + assert "platformio/tool-jlink" in result.output + assert "tool" in result.output + + +def test_spec_owner(clirunner, validate_cliresult): + result = clirunner.invoke( + package_show_cmd, + ["bblanchon/ArduinoJSON"], + ) + validate_cliresult(result) + assert "bblanchon/ArduinoJson" in result.output + assert "Library" in result.output + + # test broken owner + result = clirunner.invoke( + package_show_cmd, + ["unknown/espressif32"], + ) + with pytest.raises(UserSideException, match="Could not find"): + raise result.exception + + +def test_complete_spec(clirunner, validate_cliresult): + result = clirunner.invoke( + package_show_cmd, + ["bblanchon/ArduinoJSON", "-t", "library"], + ) + validate_cliresult(result) + assert "bblanchon/ArduinoJson" in result.output + assert "Library" in result.output + + # tool + result = clirunner.invoke( + package_show_cmd, + ["platformio/tool-jlink", "-t", "tool"], + ) + validate_cliresult(result) + assert "platformio/tool-jlink" in result.output + assert "tool" in result.output + + +def test_name_conflict(clirunner): + result = clirunner.invoke( + package_show_cmd, + ["OneWire", "-t", "library"], + ) + assert "More than one package" in result.output + assert isinstance(result.exception, UserSideException) + + +def test_spec_version(clirunner, validate_cliresult): + result = clirunner.invoke( + package_show_cmd, + ["bblanchon/ArduinoJSON@5.13.4"], + ) + validate_cliresult(result) + assert "bblanchon/ArduinoJson" in result.output + assert "Library • 5.13.4" in result.output diff --git a/tests/commands/pkg/test_uninstall.py b/tests/commands/pkg/test_uninstall.py new file mode 100644 index 00000000..1dae981c --- /dev/null +++ b/tests/commands/pkg/test_uninstall.py @@ -0,0 +1,407 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# pylint: disable=unused-argument + +import os + +from platformio import fs +from platformio.package.commands.install import package_install_cmd +from platformio.package.commands.uninstall import package_uninstall_cmd +from platformio.package.exception import UnknownPackageError +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.manager.tool import ToolPackageManager +from platformio.project.config import ProjectConfig + +PROJECT_CONFIG_TPL = """ +[env] +platform = platformio/atmelavr@^3.4.0 +lib_deps = milesburton/DallasTemperature@^3.9.1 + +[env:baremetal] +board = uno + +[env:devkit] +framework = arduino +board = attiny88 +""" + + +def pkgs_to_names(pkgs): + return [pkg.metadata.name for pkg in pkgs] + + +def test_global_packages( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + # libraries + result = clirunner.invoke( + package_install_cmd, + [ + "--global", + "-l", + "marvinroger/Homie@^3.0.1", + ], + ) + validate_cliresult(result) + assert pkgs_to_names(LibraryPackageManager().get_installed()) == [ + "ArduinoJson", + "AsyncMqttClient", + "AsyncTCP", + "Bounce2", + "ESP Async WebServer", + "ESPAsyncTCP", + "Homie", + ] + # uninstall all deps + result = clirunner.invoke( + package_uninstall_cmd, + [ + "--global", + "-l", + "Homie", + ], + ) + validate_cliresult(result) + assert not pkgs_to_names(LibraryPackageManager().get_installed()) + + # skip dependencies + validate_cliresult( + clirunner.invoke( + package_install_cmd, + [ + "--global", + "-l", + "marvinroger/Homie@^3.0.1", + ], + ) + ) + result = clirunner.invoke( + package_uninstall_cmd, + ["--global", "-l", "marvinroger/Homie@^3.0.1", "--skip-dependencies"], + ) + validate_cliresult(result) + assert pkgs_to_names(LibraryPackageManager().get_installed()) == [ + "ArduinoJson", + "AsyncMqttClient", + "AsyncTCP", + "Bounce2", + "ESP Async WebServer", + "ESPAsyncTCP", + ] + # remove specific dependency + result = clirunner.invoke( + package_uninstall_cmd, + [ + "--global", + "-l", + "ESP Async WebServer", + ], + ) + validate_cliresult(result) + assert pkgs_to_names(LibraryPackageManager().get_installed()) == [ + "ArduinoJson", + "AsyncMqttClient", + "Bounce2", + ] + + # custom storage + storage_dir = tmp_path / "custom_lib_storage" + storage_dir.mkdir() + result = clirunner.invoke( + package_install_cmd, + [ + "--global", + "--storage-dir", + str(storage_dir), + "-l", + "marvinroger/Homie@^3.0.1", + "--skip-dependencies", + ], + ) + validate_cliresult(result) + assert pkgs_to_names(LibraryPackageManager(storage_dir).get_installed()) == [ + "Homie" + ] + result = clirunner.invoke( + package_uninstall_cmd, + [ + "--global", + "--storage-dir", + str(storage_dir), + "-l", + "marvinroger/Homie@^3.0.1", + ], + ) + validate_cliresult(result) + assert not pkgs_to_names(LibraryPackageManager(storage_dir).get_installed()) + + # tools + result = clirunner.invoke( + package_install_cmd, + ["--global", "-t", "platformio/framework-arduino-avr-attiny@^1.5.2"], + ) + validate_cliresult(result) + assert pkgs_to_names(ToolPackageManager().get_installed()) == [ + "framework-arduino-avr-attiny" + ] + result = clirunner.invoke( + package_uninstall_cmd, + ["--global", "-t", "framework-arduino-avr-attiny"], + ) + validate_cliresult(result) + assert not pkgs_to_names(ToolPackageManager().get_installed()) + + # platforms + result = clirunner.invoke( + package_install_cmd, + ["--global", "-p", 
"platformio/atmelavr@^3.4.0"], + ) + validate_cliresult(result) + assert pkgs_to_names(PlatformPackageManager().get_installed()) == ["atmelavr"] + assert pkgs_to_names(ToolPackageManager().get_installed()) == ["toolchain-atmelavr"] + result = clirunner.invoke( + package_uninstall_cmd, + ["--global", "-p", "platformio/atmelavr@^3.4.0"], + ) + validate_cliresult(result) + assert not pkgs_to_names(PlatformPackageManager().get_installed()) + assert not pkgs_to_names(ToolPackageManager().get_installed()) + + +def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_names(lm.get_installed()) == ["DallasTemperature", "OneWire"] + assert pkgs_to_names(ToolPackageManager().get_installed()) == [ + "framework-arduino-avr-attiny", + "toolchain-atmelavr", + ] + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.9.1" + ] + + # try again + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + assert "Already up-to-date" in result.output + + # uninstall + result = clirunner.invoke( + package_uninstall_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert not pkgs_to_names(lm.get_installed()) + assert not pkgs_to_names(ToolPackageManager().get_installed()) + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.9.1" + ] + + +def test_custom_project_libraries( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + spec = "bblanchon/ArduinoJson@^6.19.2" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-l", spec], + ) + validate_cliresult(result) + assert "Already up-to-date" not in result.output + with fs.cd(str(project_dir)): + # check folders + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_names(lm.get_installed()) == ["ArduinoJson"] + # do not expect any platforms/tools + assert not os.path.exists(config.get("platformio", "platforms_dir")) + assert not os.path.exists(config.get("platformio", "packages_dir")) + # check saved deps + assert config.get("env:devkit", "lib_deps") == [ + "bblanchon/ArduinoJson@^6.19.2", + ] + # uninstall + result = clirunner.invoke( + package_uninstall_cmd, + ["-e", "devkit", "-l", spec], + ) + validate_cliresult(result) + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert not pkgs_to_names(lm.get_installed()) + # do not expect any platforms/tools + assert not os.path.exists(config.get("platformio", "platforms_dir")) + assert not os.path.exists(config.get("platformio", "packages_dir")) + # check saved deps + assert config.get("env:devkit", "lib_deps") == [ + 
"milesburton/DallasTemperature@^3.9.1" + ] + + # install library without saving to config + result = clirunner.invoke( + package_install_cmd, + ["-e", "devkit", "-l", spec, "--no-save"], + ) + validate_cliresult(result) + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_names(lm.get_installed()) == ["ArduinoJson"] + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.9.1", + ] + result = clirunner.invoke( + package_uninstall_cmd, + ["-e", "devkit", "-l", spec, "--no-save"], + ) + validate_cliresult(result) + config = ProjectConfig() + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.9.1", + ] + + # unknown libraries + result = clirunner.invoke( + package_uninstall_cmd, ["-l", "platformio/unknown_library"] + ) + assert isinstance(result.exception, UnknownPackageError) + + +def test_custom_project_tools( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + spec = "platformio/tool-openocd" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-t", spec], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + config = ProjectConfig() + assert pkgs_to_names(ToolPackageManager().get_installed()) == ["tool-openocd"] + assert not LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ).get_installed() + # do not expect any platforms + assert not os.path.exists(config.get("platformio", "platforms_dir")) + # check saved deps + assert config.get("env:devkit", "platform_packages") == [ + "platformio/tool-openocd@^2.1100.211028", + ] + # uninstall + result = clirunner.invoke( + package_uninstall_cmd, + ["-e", "devkit", "-t", spec], + ) + validate_cliresult(result) + assert not pkgs_to_names(ToolPackageManager().get_installed()) + # check saved deps + assert not ProjectConfig().get("env:devkit", "platform_packages") + + # install tool without saving to config + result = clirunner.invoke( + package_install_cmd, + ["-e", "devkit", "-t", "platformio/tool-esptoolpy@1.20310.0"], + ) + validate_cliresult(result) + assert pkgs_to_names(ToolPackageManager().get_installed()) == [ + "tool-esptoolpy", + ] + assert ProjectConfig().get("env:devkit", "platform_packages") == [ + "platformio/tool-esptoolpy@1.20310.0", + ] + # uninstall + result = clirunner.invoke( + package_uninstall_cmd, + ["-e", "devkit", "-t", "platformio/tool-esptoolpy@^1", "--no-save"], + ) + validate_cliresult(result) + assert not pkgs_to_names(ToolPackageManager().get_installed()) + assert ProjectConfig().get("env:devkit", "platform_packages") == [ + "platformio/tool-esptoolpy@1.20310.0", + ] + + # unknown tool + result = clirunner.invoke( + package_uninstall_cmd, ["-t", "platformio/unknown_tool"] + ) + assert isinstance(result.exception, UnknownPackageError) + + +def test_custom_project_platforms( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL) + spec = "platformio/atmelavr@^3.4.0" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-p", spec], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + config = ProjectConfig() + assert 
pkgs_to_names(PlatformPackageManager().get_installed()) == ["atmelavr"] + assert not LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ).get_installed() + assert pkgs_to_names(ToolPackageManager().get_installed()) == [ + "framework-arduino-avr-attiny", + "toolchain-atmelavr", + ] + # uninstall + result = clirunner.invoke( + package_uninstall_cmd, + ["-e", "devkit", "-p", spec], + ) + validate_cliresult(result) + assert not pkgs_to_names(PlatformPackageManager().get_installed()) + assert not pkgs_to_names(ToolPackageManager().get_installed()) + + # unknown platform + result = clirunner.invoke(package_uninstall_cmd, ["-p", "unknown_platform"]) + assert isinstance(result.exception, UnknownPackageError) diff --git a/tests/commands/pkg/test_update.py b/tests/commands/pkg/test_update.py new file mode 100644 index 00000000..c70e2b25 --- /dev/null +++ b/tests/commands/pkg/test_update.py @@ -0,0 +1,356 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint: disable=unused-argument + +import os + +from platformio import fs +from platformio.package.commands.install import package_install_cmd +from platformio.package.commands.update import package_update_cmd +from platformio.package.exception import UnknownPackageError +from platformio.package.manager.library import LibraryPackageManager +from platformio.package.manager.platform import PlatformPackageManager +from platformio.package.manager.tool import ToolPackageManager +from platformio.package.meta import PackageSpec +from platformio.project.config import ProjectConfig + +PROJECT_OUTDATED_CONFIG_TPL = """ +[env:devkit] +platform = platformio/atmelavr@^2 +framework = arduino +board = attiny88 +lib_deps = milesburton/DallasTemperature@~3.8.0 +""" + +PROJECT_UPDATED_CONFIG_TPL = """ +[env:devkit] +platform = platformio/atmelavr@<4 +framework = arduino +board = attiny88 +lib_deps = milesburton/DallasTemperature@^3.8.0 +""" + + +def pkgs_to_specs(pkgs): + return [ + PackageSpec(name=pkg.metadata.name, requirements=pkg.metadata.version) + for pkg in pkgs + ] + + +def test_global_packages( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + # libraries + result = clirunner.invoke( + package_install_cmd, + ["--global", "-l", "bblanchon/ArduinoJson@^5"], + ) + validate_cliresult(result) + assert pkgs_to_specs(LibraryPackageManager().get_installed()) == [ + PackageSpec("ArduinoJson@5.13.4") + ] + # update to the latest version + result = clirunner.invoke( + package_update_cmd, + ["--global", "-l", "bblanchon/ArduinoJson"], + ) + validate_cliresult(result) + pkgs = LibraryPackageManager().get_installed() + assert len(pkgs) == 1 + assert pkgs[0].metadata.version.major > 5 + # custom storage + storage_dir = tmp_path / "custom_lib_storage" + storage_dir.mkdir() + result = clirunner.invoke( + package_install_cmd, + [ + "--global", + "--storage-dir", + str(storage_dir), + "-l", + "bblanchon/ArduinoJson@^5", + ], + ) + validate_cliresult(result) + assert 
pkgs_to_specs(LibraryPackageManager(storage_dir).get_installed()) == [ + PackageSpec("ArduinoJson@5.13.4") + ] + # update to the latest version + result = clirunner.invoke( + package_update_cmd, + ["--global", "--storage-dir", str(storage_dir), "-l", "bblanchon/ArduinoJson"], + ) + validate_cliresult(result) + pkgs = LibraryPackageManager(storage_dir).get_installed() + assert len(pkgs) == 1 + assert pkgs[0].metadata.version.major > 5 + + # tools + result = clirunner.invoke( + package_install_cmd, + ["--global", "-t", "platformio/framework-arduino-avr-attiny@~1.4"], + ) + validate_cliresult(result) + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("framework-arduino-avr-attiny@1.4.1") + ] + # update to the latest version + result = clirunner.invoke( + package_update_cmd, + ["--global", "-t", "platformio/framework-arduino-avr-attiny@^1"], + ) + validate_cliresult(result) + pkgs = ToolPackageManager().get_installed() + assert len(pkgs) == 1 + assert pkgs[0].metadata.version.major == 1 + assert pkgs[0].metadata.version.minor > 4 + + # platforms + result = clirunner.invoke( + package_install_cmd, + ["--global", "-p", "platformio/atmelavr@^2", "--skip-dependencies"], + ) + validate_cliresult(result) + assert pkgs_to_specs(PlatformPackageManager().get_installed()) == [ + PackageSpec("atmelavr@2.2.0") + ] + # update to the latest version + result = clirunner.invoke( + package_update_cmd, + ["--global", "-p", "platformio/atmelavr", "--skip-dependencies"], + ) + validate_cliresult(result) + pkgs = PlatformPackageManager().get_installed() + assert len(pkgs) == 1 + assert pkgs[0].metadata.version.major > 2 + + # update unknown package + result = clirunner.invoke( + package_update_cmd, + ["--global", "-l", "platformio/unknown_package_for_update"], + ) + assert isinstance(result.exception, UnknownPackageError) + + +def test_project(clirunner, validate_cliresult, isolated_pio_core, tmp_path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_OUTDATED_CONFIG_TPL) + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir)], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_specs(lm.get_installed()) == [ + PackageSpec("DallasTemperature@3.8.1"), + PackageSpec("OneWire@2.3.6"), + ] + assert pkgs_to_specs(PlatformPackageManager().get_installed()) == [ + PackageSpec("atmelavr@2.2.0") + ] + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("framework-arduino-avr-attiny@1.3.2"), + PackageSpec("toolchain-atmelavr@1.50400.190710"), + ] + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@~3.8.0" + ] + + # update packages + (project_dir / "platformio.ini").write_text(PROJECT_UPDATED_CONFIG_TPL) + result = clirunner.invoke(package_update_cmd) + validate_cliresult(result) + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + pkgs = PlatformPackageManager().get_installed() + assert len(pkgs) == 1 + assert pkgs[0].metadata.name == "atmelavr" + assert pkgs[0].metadata.version.major == 3 + assert pkgs_to_specs(lm.get_installed()) == [ + PackageSpec("DallasTemperature@3.9.1"), + PackageSpec("OneWire@2.3.6"), + ] + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + 
PackageSpec("framework-arduino-avr-attiny@1.3.2"), + PackageSpec("toolchain-atmelavr@1.70300.191015"), + PackageSpec("toolchain-atmelavr@1.50400.190710"), + ] + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.8.0" + ] + + # update again + result = clirunner.invoke(package_update_cmd) + validate_cliresult(result) + assert "Already up-to-date." in result.output + + # update again in the silent ,pde + result = clirunner.invoke(package_update_cmd, ["--silent"]) + validate_cliresult(result) + assert not result.output + + +def test_custom_project_libraries( + clirunner, validate_cliresult, isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_OUTDATED_CONFIG_TPL) + spec = "milesburton/DallasTemperature@~3.8.0" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-l", spec], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + config = ProjectConfig() + assert config.get("env:devkit", "lib_deps") == [spec] + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_specs(lm.get_installed()) == [ + PackageSpec("DallasTemperature@3.8.1"), + PackageSpec("OneWire@2.3.6"), + ] + # update package + result = clirunner.invoke( + package_update_cmd, + ["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.8.0"], + ) + assert ProjectConfig().get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.8.0" + ] + # try again + result = clirunner.invoke( + package_update_cmd, + ["-e", "devkit", "-l", "milesburton/DallasTemperature@^3.8.0"], + ) + validate_cliresult(result) + assert "Already up-to-date." in result.output + + # install library without saving to config + result = clirunner.invoke( + package_update_cmd, + ["-e", "devkit", "-l", "milesburton/DallasTemperature@^3", "--no-save"], + ) + validate_cliresult(result) + assert "Already up-to-date." 
in result.output + config = ProjectConfig() + lm = LibraryPackageManager( + os.path.join(config.get("platformio", "libdeps_dir"), "devkit") + ) + assert pkgs_to_specs(lm.get_installed()) == [ + PackageSpec("DallasTemperature@3.9.1"), + PackageSpec("OneWire@2.3.6"), + ] + assert config.get("env:devkit", "lib_deps") == [ + "milesburton/DallasTemperature@^3.8.0" + ] + + # unknown libraries + result = clirunner.invoke( + package_update_cmd, ["-l", "platformio/unknown_library"] + ) + assert isinstance(result.exception, UnknownPackageError) + + +def test_custom_project_tools( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_OUTDATED_CONFIG_TPL) + spec = "toolchain-atmelavr@~1.50400.0" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-t", spec], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + assert ProjectConfig().get("env:devkit", "platform_packages") == [ + "platformio/toolchain-atmelavr@~1.50400.0" + ] + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("toolchain-atmelavr@1.50400.190710") + ] + result = clirunner.invoke( + package_update_cmd, + ["-e", "devkit", "-t", "toolchain-atmelavr@^1"], + ) + validate_cliresult(result) + assert ProjectConfig().get("env:devkit", "platform_packages") == [ + "platformio/toolchain-atmelavr@^1" + ] + assert pkgs_to_specs(ToolPackageManager().get_installed()) == [ + PackageSpec("toolchain-atmelavr@1.70300.191015") + ] + + # install without saving to config + result = clirunner.invoke( + package_update_cmd, + ["-e", "devkit", "-t", "toolchain-atmelavr@~1.70300.191015", "--no-save"], + ) + validate_cliresult(result) + assert "Already up-to-date." 
in result.output + assert ProjectConfig().get("env:devkit", "platform_packages") == [ + "platformio/toolchain-atmelavr@^1" + ] + + # unknown tool + result = clirunner.invoke(package_update_cmd, ["-t", "platformio/unknown_tool"]) + assert isinstance(result.exception, UnknownPackageError) + + +def test_custom_project_platforms( + clirunner, validate_cliresult, func_isolated_pio_core, tmp_path +): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text(PROJECT_OUTDATED_CONFIG_TPL) + spec = "atmelavr@^2" + result = clirunner.invoke( + package_install_cmd, + ["-d", str(project_dir), "-e", "devkit", "-p", spec, "--skip-dependencies"], + ) + validate_cliresult(result) + with fs.cd(str(project_dir)): + assert pkgs_to_specs(PlatformPackageManager().get_installed()) == [ + PackageSpec("atmelavr@2.2.0") + ] + assert ProjectConfig().get("env:devkit", "platform") == "platformio/atmelavr@^2" + + # update + result = clirunner.invoke( + package_install_cmd, + ["-e", "devkit", "-p", "platformio/atmelavr@^3", "--skip-dependencies"], + ) + validate_cliresult(result) + assert pkgs_to_specs(PlatformPackageManager().get_installed()) == [ + PackageSpec("atmelavr@3.4.0"), + PackageSpec("atmelavr@2.2.0"), + ] + assert ProjectConfig().get("env:devkit", "platform") == "platformio/atmelavr@^2" + + # unknown platform + result = clirunner.invoke(package_install_cmd, ["-p", "unknown_platform"]) + assert isinstance(result.exception, UnknownPackageError) diff --git a/tests/commands/test_check.py b/tests/commands/test_check.py index d4b3b79b..746a81dd 100644 --- a/tests/commands/test_check.py +++ b/tests/commands/test_check.py @@ -15,6 +15,7 @@ # pylint: disable=redefined-outer-name import json +import sys from os.path import isfile, join import pytest @@ -121,6 +122,56 @@ def test_check_tool_defines_passed(clirunner, check_dir): assert "__GNUC__" in output +def test_check_tool_complex_defines_handled( + clirunner, validate_cliresult, tmpdir_factory +): + project_dir = tmpdir_factory.mktemp("project_dir") + + project_dir.join("platformio.ini").write( + DEFAULT_CONFIG + + R""" +check_tool = cppcheck, clangtidy, pvs-studio +build_flags = + -DEXTERNAL_INCLUDE_FILE=\"test.h\" + "-DDEFINE_WITH_SPACE="Hello World!"" +""" + ) + + src_dir = project_dir.mkdir("src") + src_dir.join("test.h").write( + """ +#ifndef TEST_H +#define TEST_H +#define ARBITRARY_CONST_VALUE 10 +#endif +""" + ) + + src_dir.join("main.c").write( + PVS_STUDIO_FREE_LICENSE_HEADER + + """ +#if !defined(EXTERNAL_INCLUDE_FILE) +#error "EXTERNAL_INCLUDE_FILE is not declared!" 
+#else +#include EXTERNAL_INCLUDE_FILE +#endif + +int main() +{ + /* Index out of bounds */ + int arr[ARBITRARY_CONST_VALUE]; + for(int i=0; i < ARBITRARY_CONST_VALUE+1; i++) { + arr[i] = 0; /* High */ + } + return 0; +} +""" + ) + + default_result = clirunner.invoke(cmd_check, ["--project-dir", str(project_dir)]) + validate_cliresult(default_result) + + def test_check_language_standard_definition_passed(clirunner, tmpdir): config = DEFAULT_CONFIG + "\nbuild_flags = -std=c++17" tmpdir.join("platformio.ini").write(config) @@ -466,6 +517,38 @@ def test_check_pvs_studio_fails_without_license(clirunner, tmpdir): assert "license was not entered" in verbose_result.output.lower() +@pytest.mark.skipif( + sys.platform != "win32", + reason="For some reason the error message is different on Windows", +) +def test_check_pvs_studio_fails_broken_license(clirunner, tmpdir): + config = ( + DEFAULT_CONFIG + + """ +check_tool = pvs-studio +check_flags = --lic-file=./pvs-studio.lic +""" + ) + + tmpdir.join("platformio.ini").write(config) + tmpdir.mkdir("src").join("main.c").write(TEST_CODE) + tmpdir.join("pvs-studio.lic").write( + """ +TEST +TEST-TEST-TEST-TEST +""" + ) + + default_result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)]) + verbose_result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir), "-v"]) + + assert default_result.exit_code != 0 + assert "failed to perform check" in default_result.output.lower() + + assert verbose_result.exit_code != 0 + assert "license information is incorrect" in verbose_result.output.lower() + + def test_check_embedded_platform_all_tools(clirunner, validate_cliresult, tmpdir): config = """ [env:test] diff --git a/tests/commands/test_init.py b/tests/commands/test_init.py index 286197fa..b3d9f013 100644 --- a/tests/commands/test_init.py +++ b/tests/commands/test_init.py @@ -13,55 +13,56 @@ # limitations under the License. 
import json -from os import getcwd, makedirs -from os.path import getsize, isdir, isfile, join +import os -import pytest - -from platformio import proc -from platformio.commands import platform as cli_platform from platformio.commands.boards import cli as cmd_boards -from platformio.commands.project import project_init as cmd_init +from platformio.package.commands.exec import package_exec_cmd +from platformio.project.commands.init import project_init_cmd from platformio.project.config import ProjectConfig from platformio.project.exception import ProjectEnvsNotAvailableError def validate_pioproject(pioproject_dir): - pioconf_path = join(pioproject_dir, "platformio.ini") - assert isfile(pioconf_path) and getsize(pioconf_path) > 0 - assert isdir(join(pioproject_dir, "src")) and isdir(join(pioproject_dir, "lib")) + pioconf_path = os.path.join(pioproject_dir, "platformio.ini") + assert os.path.isfile(pioconf_path) and os.path.getsize(pioconf_path) > 0 + assert os.path.isdir(os.path.join(pioproject_dir, "src")) and os.path.isdir( + os.path.join(pioproject_dir, "lib") + ) def test_init_default(clirunner, validate_cliresult): with clirunner.isolated_filesystem(): - result = clirunner.invoke(cmd_init) + result = clirunner.invoke(project_init_cmd) validate_cliresult(result) - validate_pioproject(getcwd()) + validate_pioproject(os.getcwd()) def test_init_ext_folder(clirunner, validate_cliresult): with clirunner.isolated_filesystem(): ext_folder_name = "ext_folder" - makedirs(ext_folder_name) - result = clirunner.invoke(cmd_init, ["-d", ext_folder_name]) + os.makedirs(ext_folder_name) + result = clirunner.invoke(project_init_cmd, ["-d", ext_folder_name]) validate_cliresult(result) - validate_pioproject(join(getcwd(), ext_folder_name)) + validate_pioproject(os.path.join(os.getcwd(), ext_folder_name)) def test_init_duplicated_boards(clirunner, validate_cliresult, tmpdir): with tmpdir.as_cwd(): for _ in range(2): - result = clirunner.invoke(cmd_init, ["-b", "uno", "-b", "uno"]) + result = clirunner.invoke( + project_init_cmd, + ["-b", "uno", "-b", "uno", "--no-install-dependencies"], + ) validate_cliresult(result) validate_pioproject(str(tmpdir)) - config = ProjectConfig(join(getcwd(), "platformio.ini")) + config = ProjectConfig(os.path.join(os.getcwd(), "platformio.ini")) config.validate() assert set(config.sections()) == set(["env:uno"]) def test_init_ide_without_board(clirunner, tmpdir): with tmpdir.as_cwd(): - result = clirunner.invoke(cmd_init, ["--ide", "atom"]) + result = clirunner.invoke(project_init_cmd, ["--ide", "atom"]) assert result.exit_code != 0 assert isinstance(result.exception, ProjectEnvsNotAvailableError) @@ -69,7 +70,16 @@ def test_init_ide_without_board(clirunner, tmpdir): def test_init_ide_vscode(clirunner, validate_cliresult, tmpdir): with tmpdir.as_cwd(): result = clirunner.invoke( - cmd_init, ["--ide", "vscode", "-b", "uno", "-b", "teensy31"] + project_init_cmd, + [ + "--ide", + "vscode", + "-b", + "uno", + "-b", + "teensy31", + "--no-install-dependencies", + ], ) validate_cliresult(result) validate_pioproject(str(tmpdir)) @@ -83,7 +93,10 @@ def test_init_ide_vscode(clirunner, validate_cliresult, tmpdir): ) # switch to NodeMCU - result = clirunner.invoke(cmd_init, ["--ide", "vscode", "-b", "nodemcuv2"]) + result = clirunner.invoke( + project_init_cmd, + ["--ide", "vscode", "-b", "nodemcuv2", "--no-install-dependencies"], + ) validate_cliresult(result) validate_pioproject(str(tmpdir)) assert ( @@ -92,7 +105,10 @@ def test_init_ide_vscode(clirunner, validate_cliresult, tmpdir): ) 
# switch to teensy31 via env name - result = clirunner.invoke(cmd_init, ["--ide", "vscode", "-e", "teensy31"]) + result = clirunner.invoke( + project_init_cmd, + ["--ide", "vscode", "-e", "teensy31", "--no-install-dependencies"], + ) validate_cliresult(result) validate_pioproject(str(tmpdir)) assert ( @@ -101,7 +117,9 @@ def test_init_ide_vscode(clirunner, validate_cliresult, tmpdir): ) # switch to the first board - result = clirunner.invoke(cmd_init, ["--ide", "vscode"]) + result = clirunner.invoke( + project_init_cmd, ["--ide", "vscode", "--no-install-dependencies"] + ) validate_cliresult(result) validate_pioproject(str(tmpdir)) assert ( @@ -112,23 +130,26 @@ def test_init_ide_vscode(clirunner, validate_cliresult, tmpdir): def test_init_ide_eclipse(clirunner, validate_cliresult): with clirunner.isolated_filesystem(): - result = clirunner.invoke(cmd_init, ["-b", "uno", "--ide", "eclipse"]) + result = clirunner.invoke( + project_init_cmd, + ["-b", "uno", "--ide", "eclipse", "--no-install-dependencies"], + ) validate_cliresult(result) - validate_pioproject(getcwd()) - assert all(isfile(f) for f in (".cproject", ".project")) + validate_pioproject(os.getcwd()) + assert all(os.path.isfile(f) for f in (".cproject", ".project")) def test_init_special_board(clirunner, validate_cliresult): with clirunner.isolated_filesystem(): - result = clirunner.invoke(cmd_init, ["-b", "uno"]) + result = clirunner.invoke(project_init_cmd, ["-b", "uno"]) validate_cliresult(result) - validate_pioproject(getcwd()) + validate_pioproject(os.getcwd()) result = clirunner.invoke(cmd_boards, ["Arduino Uno", "--json-output"]) validate_cliresult(result) boards = json.loads(result.output) - config = ProjectConfig(join(getcwd(), "platformio.ini")) + config = ProjectConfig(os.path.join(os.getcwd(), "platformio.ini")) config.validate() expected_result = dict( @@ -145,11 +166,18 @@ def test_init_special_board(clirunner, validate_cliresult): def test_init_enable_auto_uploading(clirunner, validate_cliresult): with clirunner.isolated_filesystem(): result = clirunner.invoke( - cmd_init, ["-b", "uno", "--project-option", "targets=upload"] + project_init_cmd, + [ + "-b", + "uno", + "--project-option", + "targets=upload", + "--no-install-dependencies", + ], ) validate_cliresult(result) - validate_pioproject(getcwd()) - config = ProjectConfig(join(getcwd(), "platformio.ini")) + validate_pioproject(os.getcwd()) + config = ProjectConfig(os.path.join(os.getcwd(), "platformio.ini")) config.validate() expected_result = dict( targets=["upload"], platform="atmelavr", board="uno", framework=["arduino"] @@ -163,11 +191,18 @@ def test_init_enable_auto_uploading(clirunner, validate_cliresult): def test_init_custom_framework(clirunner, validate_cliresult): with clirunner.isolated_filesystem(): result = clirunner.invoke( - cmd_init, ["-b", "teensy31", "--project-option", "framework=mbed"] + project_init_cmd, + [ + "-b", + "teensy31", + "--project-option", + "framework=mbed", + "--no-install-dependencies", + ], ) validate_cliresult(result) - validate_pioproject(getcwd()) - config = ProjectConfig(join(getcwd(), "platformio.ini")) + validate_pioproject(os.getcwd()) + config = ProjectConfig(os.path.join(os.getcwd(), "platformio.ini")) config.validate() expected_result = dict(platform="teensy", board="teensy31", framework=["mbed"]) assert config.has_section("env:teensy31") @@ -177,87 +212,86 @@ def test_init_custom_framework(clirunner, validate_cliresult): def test_init_incorrect_board(clirunner): - result = clirunner.invoke(cmd_init, ["-b", 
"missed_board"]) + result = clirunner.invoke(project_init_cmd, ["-b", "missed_board"]) assert result.exit_code == 2 assert "Error: Invalid value for" in result.output assert isinstance(result.exception, SystemExit) -@pytest.mark.skipif(not proc.is_ci(), reason="runs on CI") -def test_init_ide_clion(clirunner, isolated_pio_core, validate_cliresult, tmpdir): - result = clirunner.invoke( - cli_platform.platform_install, - [ - "ststm32", - "--skip-default-package", - "--with-package", - "tool-cmake", - "--with-package", - "tool-ninja", - ], - ) - +def test_init_ide_clion(clirunner, validate_cliresult, tmpdir): + project_dir = tmpdir.join("project").mkdir() # Add extra libraries to cover cases with possible unwanted backslashes - lib_extra_dirs = isolated_pio_core.join("extra_libs").mkdir() + lib_extra_dirs = tmpdir.join("extra_libs").mkdir() extra_lib = lib_extra_dirs.join("extra_lib").mkdir() extra_lib.join("extra_lib.h").write(" ") extra_lib.join("extra_lib.cpp").write(" ") - with tmpdir.as_cwd(): + with project_dir.as_cwd(): result = clirunner.invoke( - cmd_init, + project_init_cmd, [ "-b", - "nucleo_f401re", + "uno", "--ide", "clion", "--project-option", "framework=arduino", "--project-option", + "platform_packages=platformio/tool-ninja", + "--project-option", "lib_extra_dirs=%s" % str(lib_extra_dirs), ], ) validate_cliresult(result) - assert all(isfile(f) for f in ("CMakeLists.txt", "CMakeListsPrivate.txt")) + assert all( + os.path.isfile(f) for f in ("CMakeLists.txt", "CMakeListsPrivate.txt") + ) - tmpdir.join("src").join("main.cpp").write( + project_dir.join("src").join("main.cpp").write( """#include #include "extra_lib.h" void setup(){} void loop(){} """ ) - cmake_path = str( - isolated_pio_core.join("packages") - .join("tool-cmake") - .join("bin") - .join("cmake") - ) - tmpdir.join("build_dir").mkdir() - result = proc.exec_command( + project_dir.join("build_dir").mkdir() + result = clirunner.invoke( + package_exec_cmd, [ - cmake_path, - "-DCMAKE_BUILD_TYPE=nucleo_f401re", + "-p", + "tool-cmake", + "--", + "cmake", + "-DCMAKE_BUILD_TYPE=uno", "-DCMAKE_MAKE_PROGRAM=%s" - % str( - isolated_pio_core.join("packages").join("tool-ninja").join("ninja") + % os.path.join( + ProjectConfig().get("platformio", "packages_dir"), + "tool-ninja", + "ninja", ), "-G", "Ninja", "-S", - str(tmpdir), + str(project_dir), "-B", "build_dir", - ] + ], ) + validate_cliresult(result) - # Check if CMake was able to generate a native project for Ninja - assert result["returncode"] == 0, result["out"] - - result = proc.exec_command( - [cmake_path, "--build", "build_dir", "--target", "Debug"] + # build + result = clirunner.invoke( + package_exec_cmd, + [ + "-p", + "tool-cmake", + "--", + "cmake", + "--build", + "build_dir", + "--target", + "Debug", + ], ) - - assert result["returncode"] == 0 - assert "[SUCCESS]" in str(result["out"]) + validate_cliresult(result) diff --git a/tests/commands/test_lib.py b/tests/commands/test_lib.py index b2541841..9429aaba 100644 --- a/tests/commands/test_lib.py +++ b/tests/commands/test_lib.py @@ -17,6 +17,7 @@ import json import os +import pytest import semantic_version from platformio.clients.registry import RegistryClient @@ -225,5 +226,13 @@ def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory result = clirunner.invoke( cmd_lib, ["-d", str(storage_dir), "update", "--dry-run", "ArduinoJson @ ^5"] ) + with pytest.raises( + AssertionError, + match="This command is deprecated", + ): + validate_cliresult(result) + result = clirunner.invoke( + cmd_lib, 
["-d", str(storage_dir), "update", "ArduinoJson @ ^5"] + ) validate_cliresult(result) - assert "Incompatible" in result.stdout + assert "ArduinoJson@5.13.4 is already up-to-date" in result.stdout diff --git a/tests/commands/test_lib_complex.py b/tests/commands/test_lib_complex.py index d74bf207..ef0c0c4b 100644 --- a/tests/commands/test_lib_complex.py +++ b/tests/commands/test_lib_complex.py @@ -20,6 +20,7 @@ import re from platformio.commands import PlatformioCLI from platformio.commands.lib.command import cli as cmd_lib from platformio.package.exception import UnknownPackageError +from platformio.util import strip_ansi_codes PlatformioCLI.leftover_args = ["--json-output"] # hook for click @@ -248,13 +249,13 @@ def test_global_lib_update(clirunner, validate_cliresult): assert "__pkg_dir" in oudated[0] result = clirunner.invoke(cmd_lib, ["-g", "update", oudated[0]["__pkg_dir"]]) validate_cliresult(result) - assert "Removing NeoPixelBus @ 2.2.4" in result.output + assert "Removing NeoPixelBus @ 2.2.4" in strip_ansi_codes(result.output) # update rest libraries result = clirunner.invoke(cmd_lib, ["-g", "update"]) validate_cliresult(result) - assert result.output.count("[Detached]") == 1 - assert result.output.count("[Up-to-date]") == 13 + assert result.output.count("+sha.") == 4 + assert result.output.count("already up-to-date") == 14 # update unknown library result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"]) @@ -270,7 +271,7 @@ def test_global_lib_uninstall(clirunner, validate_cliresult, isolated_pio_core): items = sorted(items, key=lambda item: item["__pkg_dir"]) result = clirunner.invoke(cmd_lib, ["-g", "uninstall", items[0]["__pkg_dir"]]) validate_cliresult(result) - assert ("Removing %s" % items[0]["name"]) in result.output + assert ("Removing %s" % items[0]["name"]) in strip_ansi_codes(result.output) # uninstall the rest libraries result = clirunner.invoke( @@ -288,17 +289,18 @@ def test_global_lib_uninstall(clirunner, validate_cliresult, isolated_pio_core): items1 = [d.basename for d in isolated_pio_core.join("lib").listdir()] items2 = [ - "ArduinoJson", - "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81", "AsyncMqttClient", - "AsyncTCP", - "ESP32WebServer", - "ESPAsyncTCP", - "NeoPixelBus", - "PJON", - "PJON@src-79de467ebe19de18287becff0a1fb42d", "platformio-libmirror", "PubSubClient", + "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81", + "ESPAsyncTCP@1.2.0", + "AsyncTCP", + "ArduinoJson", + "ESPAsyncTCP", + "ESP32WebServer", + "PJON", + "NeoPixelBus", + "PJON@src-79de467ebe19de18287becff0a1fb42d", "SomeLib", ] assert set(items1) == set(items2) @@ -335,17 +337,14 @@ def test_lib_stats(clirunner, validate_cliresult): result = clirunner.invoke(cmd_lib, ["stats", "--json-output"]) validate_cliresult(result) - assert ( - set( - [ - "dlweek", - "added", - "updated", - "topkeywords", - "dlmonth", - "dlday", - "lastkeywords", - ] - ) - == set(json.loads(result.output).keys()) - ) + assert set( + [ + "dlweek", + "added", + "updated", + "topkeywords", + "dlmonth", + "dlday", + "lastkeywords", + ] + ) == set(json.loads(result.output).keys()) diff --git a/tests/commands/test_platform.py b/tests/commands/test_platform.py index 508eae22..604e392f 100644 --- a/tests/commands/test_platform.py +++ b/tests/commands/test_platform.py @@ -18,6 +18,7 @@ import json from platformio.commands import platform as cli_platform from platformio.package.exception import UnknownPackageError +from platformio.util import strip_ansi_codes def test_search_json_output(clirunner, validate_cliresult, 
isolated_pio_core): @@ -72,8 +73,9 @@ def test_install_known_version(clirunner, validate_cliresult, isolated_pio_core) ["atmelavr@2.0.0", "--skip-default-package", "--with-package", "tool-avrdude"], ) validate_cliresult(result) - assert "atmelavr @ 2.0.0" in result.output - assert "Installing tool-avrdude @" in result.output + output = strip_ansi_codes(result.output) + assert "atmelavr @ 2.0.0" in output + assert "Installing tool-avrdude @" in output assert len(isolated_pio_core.join("packages").listdir()) == 1 @@ -120,8 +122,9 @@ def test_update_check(clirunner, validate_cliresult, isolated_pio_core): def test_update_raw(clirunner, validate_cliresult, isolated_pio_core): result = clirunner.invoke(cli_platform.platform_update) validate_cliresult(result) - assert "Removing atmelavr @ 2.0.0" in result.output - assert "Platform Manager: Installing platformio/atmelavr @" in result.output + output = strip_ansi_codes(result.output) + assert "Removing atmelavr @ 2.0.0" in output + assert "Platform Manager: Installing platformio/atmelavr @" in output assert len(isolated_pio_core.join("packages").listdir()) == 2 diff --git a/tests/test_builder.py b/tests/commands/test_run.py similarity index 83% rename from tests/test_builder.py rename to tests/commands/test_run.py index f220e50c..f5935668 100644 --- a/tests/test_builder.py +++ b/tests/commands/test_run.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from pathlib import Path + from platformio.commands.run.command import cli as cmd_run @@ -176,3 +178,46 @@ int main() { for level in (0, 1, 2) ) assert all("-O%s" % optimization not in line for optimization in ("g", "s")) + + +def test_symlinked_libs(clirunner, validate_cliresult, tmp_path: Path): + external_pkg_dir = tmp_path / "External" + external_pkg_dir.mkdir() + (external_pkg_dir / "External.h").write_text( + """ +#define EXTERNAL 1 +""" + ) + (external_pkg_dir / "library.json").write_text( + """ +{ + "name": "External", + "version": "1.0.0" +} +""" + ) + + project_dir = tmp_path / "project" + src_dir = project_dir / "src" + src_dir.mkdir(parents=True) + (src_dir / "main.c").write_text( + """ +#include +# +#if !defined(EXTERNAL) +#error "EXTERNAL is not defined" +#endif + +int main() { +} +""" + ) + (project_dir / "platformio.ini").write_text( + """ +[env:native] +platform = native +lib_deps = symlink://../External + """ + ) + result = clirunner.invoke(cmd_run, ["--project-dir", str(project_dir), "--verbose"]) + validate_cliresult(result) diff --git a/tests/commands/test_test.py b/tests/commands/test_test.py index f7552ed8..db7acb13 100644 --- a/tests/commands/test_test.py +++ b/tests/commands/test_test.py @@ -13,14 +13,19 @@ # limitations under the License. 
import os +import sys +import xml.etree.ElementTree as ET +from pathlib import Path import pytest from platformio import proc -from platformio.commands.test.command import cli as cmd_test +from platformio.fs import load_json +from platformio.test.command import test_cmd as pio_test_cmd -def test_local_env(): +def test_calculator_example(tmp_path: Path): + junit_output_path = tmp_path / "junit.xml" result = proc.exec_command( [ "platformio", @@ -28,78 +33,280 @@ def test_local_env(): "-d", os.path.join("examples", "unit-testing", "calculator"), "-e", + "uno", + "-e", "native", + "--junit-output-path", + str(junit_output_path), ] ) - if result["returncode"] != 1: - pytest.fail(str(result)) + assert result["returncode"] != 0 # pylint: disable=unsupported-membership-test - assert all(s in result["err"] for s in ("PASSED", "FAILED")), result["out"] + assert all( + s in (result["err"] + result["out"]) for s in ("ERRORED", "PASSED", "FAILED") + ), result["out"] + + # test JUnit output + junit_testsuites = ET.parse(junit_output_path).getroot() + assert int(junit_testsuites.get("tests")) == 11 + assert int(junit_testsuites.get("errors")) == 2 + assert int(junit_testsuites.get("failures")) == 1 + assert len(junit_testsuites.findall("testsuite")) == 6 + junit_errored_testcase = junit_testsuites.find( + ".//testcase[@name='uno:test_embedded']" + ) + assert junit_errored_testcase.get("status") == "ERRORED" + assert junit_errored_testcase.find("error").get("type") == "UnitTestSuiteError" + junit_failed_testcase = junit_testsuites.find( + ".//testsuite[@name='native:test_desktop']" + "/testcase[@name='test_calculator_division']" + ) + assert junit_failed_testcase.get("status") == "FAILED" + assert junit_failed_testcase.find("failure").get("message") == "Expected 32 Was 33" -def test_multiple_env_build(clirunner, validate_cliresult, tmpdir): +def test_list_tests(clirunner, validate_cliresult, tmp_path: Path): + json_output_path = tmp_path / "report.json" + result = clirunner.invoke( + pio_test_cmd, + [ + "-d", + os.path.join("examples", "unit-testing", "calculator"), + "--list-tests", + "--json-output-path", + str(json_output_path), + ], + ) + validate_cliresult(result) + # test JSON + json_report = load_json(str(json_output_path)) + assert json_report["testcase_nums"] == 0 + assert json_report["failure_nums"] == 0 + assert json_report["skipped_nums"] == 0 + assert len(json_report["test_suites"]) == 6 - project_dir = tmpdir.mkdir("project") - project_dir.join("platformio.ini").write( + +def test_group_and_custom_runner(clirunner, validate_cliresult, tmp_path: Path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text( """ -[env:teensy31] -platform = teensy -framework = arduino -board = teensy31 - [env:native] platform = native +test_framework = custom +""" + ) + test_dir = project_dir / "test" -[env:espressif8266] -platform = espressif8266 -framework = arduino -board = nodemcuv2 + # non-test folder, does not start with "test_" + disabled_dir = test_dir / "disabled" + disabled_dir.mkdir(parents=True) + (disabled_dir / "main.c").write_text( + """ +#include + +int main() { + printf("Disabled test suite\\n") +} + """ + ) + + # root + (test_dir / "my_extra.h").write_text( + """ +#ifndef MY_EXTRA_H +#define MY_EXTRA_H + +#include + +void my_extra_fun(void); +#endif +""" + ) + (test_dir / "my_extra.c").write_text( + """ +#include "my_extra.h" + +void my_extra_fun(void) { + printf("Called from my_extra_fun\\n"); +} """ ) - 
project_dir.mkdir("test").join("test_main.cpp").write( + # test group + test_group = test_dir / "group" + test_group.mkdir(parents=True) + (test_group / "test_custom_runner.py").write_text( + """ +import click + +from platformio.test.runners.unity import UnityTestRunner + +class CustomTestRunner(UnityTestRunner): + def teardown(self): + click.echo("CustomTestRunner::TearDown called") +""" + ) + + # test suite + test_suite_dir = test_group / "test_nested" + test_include_dir = test_suite_dir / "include" + test_include_dir.mkdir(parents=True) + (test_include_dir / "my_nested.h").write_text( + """ +#define TEST_ONE 1 +""" + ) + (test_suite_dir / "main.c").write_text( """ #include -#ifdef ARDUINO -void setup() -#else -int main() -#endif -{ - UNITY_BEGIN(); - UNITY_END(); +#include +#include +void setUp(){ + my_extra_fun(); } -void loop() {} -""" - ) +void tearDown(void) { + // clean stuff up here +} + +void dummy_test(void) { + TEST_ASSERT_EQUAL(1, TEST_ONE); +} + +int main() { + UNITY_BEGIN(); + RUN_TEST(dummy_test); + UNITY_END(); +} + """ + ) result = clirunner.invoke( - cmd_test, - ["-d", str(project_dir), "--without-testing", "--without-uploading"], + pio_test_cmd, + ["-d", str(project_dir), "-e", "native", "--verbose"], ) - validate_cliresult(result) - assert "Multiple ways to build" not in result.output + assert "Called from my_extra_fun" in result.output + assert "CustomTestRunner::TearDown called" in result.output + assert "Disabled test suite" not in result.output -def test_setup_teardown_are_compilable(clirunner, validate_cliresult, tmpdir): - +def test_crashed_program(clirunner, tmpdir): project_dir = tmpdir.mkdir("project") project_dir.join("platformio.ini").write( """ -[env:embedded] -platform = ststm32 -framework = stm32cube -board = nucleo_f401re -test_transport = custom - [env:native] platform = native - """ ) + test_dir = project_dir.mkdir("test") + test_dir.join("test_main.c").write( + """ +#include +#include +void setUp(){ + printf("setUp called"); +} +void tearDown(){ + printf("tearDown called"); +} + +void dummy_test(void) { + TEST_ASSERT_EQUAL(1, 1); +} + +int main(int argc, char *argv[]) { + printf("Address boundary error is %s", argv[-1]); + UNITY_BEGIN(); + RUN_TEST(dummy_test); + UNITY_END(); + return 0; +} +""" + ) + result = clirunner.invoke( + pio_test_cmd, + ["-d", str(project_dir), "-e", "native"], + ) + assert result.exit_code != 0 + assert any( + s in result.output for s in ("Program received signal", "Program errored with") + ) + + +@pytest.mark.skipif( + sys.platform != "darwin", reason="runs only on macOS (issue with SimAVR)" +) +def test_custom_testing_command(clirunner, validate_cliresult, tmp_path: Path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text( + """ +[env:uno] +platform = atmelavr +framework = arduino +board = uno + +platform_packages = + platformio/tool-simavr @ ^1 +test_speed = 9600 +test_testing_command = + ${platformio.packages_dir}/tool-simavr/bin/simavr + -m + atmega328p + -f + 16000000L + ${platformio.build_dir}/${this.__env__}/firmware.elf +""" + ) + test_dir = project_dir / "test" / "test_dummy" + test_dir.mkdir(parents=True) + (test_dir / "test_main.cpp").write_text( + """ +#include +#include + +void setUp(void) { + // set stuff up here +} + +void tearDown(void) { + // clean stuff up here +} + +void dummy_test(void) { + TEST_ASSERT_EQUAL(1, 1); +} + +void setup() { + UNITY_BEGIN(); + RUN_TEST(dummy_test); + UNITY_END(); +} + +void loop() { + delay(1000); +} +""" + ) + result = 
clirunner.invoke( + pio_test_cmd, + ["-d", str(project_dir), "--without-uploading"], + ) + validate_cliresult(result) + assert "dummy_test" in result.output + + +def test_unity_setup_teardown(clirunner, validate_cliresult, tmpdir): + project_dir = tmpdir.mkdir("project") + project_dir.join("platformio.ini").write( + """ +[env:native] +platform = native +""" + ) test_dir = project_dir.mkdir("test") test_dir.join("test_main.c").write( """ @@ -124,12 +331,103 @@ int main() { } """ ) - - native_result = clirunner.invoke( - cmd_test, + result = clirunner.invoke( + pio_test_cmd, ["-d", str(project_dir), "-e", "native"], ) + validate_cliresult(result) + assert all(f in result.output for f in ("setUp called", "tearDown called")) + +def test_unity_custom_config(clirunner, validate_cliresult, tmp_path: Path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text( + """ +[env:native] +platform = native +""" + ) + test_dir = project_dir / "test" / "native" / "test_component" + test_dir.mkdir(parents=True) + (test_dir.parent / "unity_config.h").write_text( + """ +#include + +#define CUSTOM_UNITY_CONFIG + +#define UNITY_OUTPUT_CHAR(c) putchar(c) +#define UNITY_OUTPUT_FLUSH() fflush(stdout) +""" + ) + (test_dir / "test_main.c").write_text( + """ +#include +#include + +void setUp(){ +#ifdef CUSTOM_UNITY_CONFIG + printf("Found custom unity_config.h\\n"); +#endif +} +void tearDown(){ +} + +void dummy_test(void) { + TEST_ASSERT_EQUAL(1, 1); +} + +int main() { + UNITY_BEGIN(); + RUN_TEST(dummy_test); + UNITY_END(); +} +""" + ) + result = clirunner.invoke( + pio_test_cmd, + ["-d", str(project_dir), "-e", "native", "--verbose"], + ) + validate_cliresult(result) + assert all(f in result.output for f in ("Found custom unity_config", "dummy_test")) + + +def test_legacy_unity_custom_transport(clirunner, validate_cliresult, tmpdir): + project_dir = tmpdir.mkdir("project") + project_dir.join("platformio.ini").write( + """ +[env:embedded] +platform = ststm32 +framework = stm32cube +board = nucleo_f401re +test_transport = custom +""" + ) + + test_dir = project_dir.mkdir("test") + test_dir.join("test_main.c").write( + """ +#include + +void setUp(void) { + // set stuff up here +} + +void tearDown(void) { + // clean stuff up here +} + +void dummy_test(void) { + TEST_ASSERT_EQUAL(1, 1); +} + +int main() { + UNITY_BEGIN(); + RUN_TEST(dummy_test); + UNITY_END(); +} +""" + ) test_dir.join("unittest_transport.h").write( """ #ifdef __cplusplus @@ -146,25 +444,195 @@ void unittest_uart_end(){} #endif """ ) - - embedded_result = clirunner.invoke( - cmd_test, + result = clirunner.invoke( + pio_test_cmd, [ "-d", str(project_dir), "--without-testing", "--without-uploading", - "-e", - "embedded", ], ) + validate_cliresult(result) - validate_cliresult(native_result) - validate_cliresult(embedded_result) - print("native_result.output", native_result.output) - print("embedded_result.output", embedded_result.output) - assert all(f in native_result.output for f in ("setUp called", "tearDown called")) - assert all( - "[FAILED]" not in out for out in (native_result.output, embedded_result.output) +@pytest.mark.skipif( + sys.platform == "win32" and os.environ.get("GITHUB_ACTIONS") == "true", + reason="skip Github Actions on Windows (MinGW issue)", +) +def test_doctest_framework(clirunner, tmp_path: Path): + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / "platformio.ini").write_text( + """ +[env:native] +platform = native +test_framework = doctest +""" ) + test_dir 
= project_dir / "test" / "test_dummy" + test_dir.mkdir(parents=True) + (test_dir / "test_main.cpp").write_text( + """ +#define DOCTEST_CONFIG_IMPLEMENT +#include + +TEST_CASE("[math] basic stuff") +{ + CHECK(6 > 5); + CHECK(6 > 7); +} + +TEST_CASE("should be skipped " * doctest::skip()) +{ + CHECK(2 > 5); +} + +TEST_CASE("vectors can be sized and resized") +{ + std::vector v(5); + + REQUIRE(v.size() == 5); + REQUIRE(v.capacity() >= 5); + + SUBCASE("adding to the vector increases it's size") + { + v.push_back(1); + + CHECK(v.size() == 6); + CHECK(v.capacity() >= 6); + } + SUBCASE("reserving increases just the capacity") + { + v.reserve(6); + + CHECK(v.size() == 5); + CHECK(v.capacity() >= 6); + } +} + +TEST_CASE("WARN level of asserts don't fail the test case") +{ + WARN(0); + WARN_FALSE(1); + WARN_EQ(1, 0); +} + +TEST_SUITE("scoped test suite") +{ + TEST_CASE("part of scoped") + { + FAIL("Error message"); + } + + TEST_CASE("part of scoped 2") + { + FAIL(""); + } +} + +int main(int argc, char **argv) +{ + doctest::Context context; + context.setOption("success", true); + context.setOption("no-exitcode", true); + context.applyCommandLine(argc, argv); + return context.run(); +} +""" + ) + junit_output_path = tmp_path / "junit.xml" + result = clirunner.invoke( + pio_test_cmd, + [ + "-d", + str(project_dir), + "--junit-output-path", + str(junit_output_path), + ], + ) + assert result.exit_code != 0 + # test JUnit output + junit_testsuites = ET.parse(junit_output_path).getroot() + assert int(junit_testsuites.get("tests")) == 8 + assert int(junit_testsuites.get("errors")) == 0 + assert int(junit_testsuites.get("failures")) == 3 + assert len(junit_testsuites.findall("testsuite")) == 1 + junit_failed_testcase = junit_testsuites.find( + ".//testcase[@name='scoped test suite/part of scoped']" + ) + assert junit_failed_testcase.get("status") == "FAILED" + assert junit_failed_testcase.find("failure").get("message") == "Error message" + assert "TEST SUITE: scoped test suite" in junit_failed_testcase.find("failure").text + + # test program arguments + json_output_path = tmp_path / "report.json" + result = clirunner.invoke( + pio_test_cmd, + [ + "-d", + str(project_dir), + "--json-output-path", + str(json_output_path), + "-a", + "-aa=1", # fail after the 1 error + ], + ) + assert result.exit_code != 0 + assert "1 test cases" in result.output + # test JSON + json_report = load_json(str(json_output_path)) + assert json_report["testcase_nums"] == 1 + assert json_report["failure_nums"] == 1 + + +def test_googletest_framework(clirunner, tmp_path: Path): + project_dir = os.path.join("examples", "unit-testing", "googletest") + junit_output_path = tmp_path / "junit.xml" + result = clirunner.invoke( + pio_test_cmd, + [ + "-d", + project_dir, + "-e", + "native", + "--junit-output-path", + str(junit_output_path), + ], + ) + assert result.exit_code != 0 + # test JUnit output + junit_testsuites = ET.parse(junit_output_path).getroot() + assert int(junit_testsuites.get("tests")) == 4 + assert int(junit_testsuites.get("errors")) == 0 + assert int(junit_testsuites.get("failures")) == 1 + assert len(junit_testsuites.findall("testsuite")) == 4 + junit_failed_testcase = junit_testsuites.find(".//testcase[@name='FooTest.Bar']") + assert junit_failed_testcase.get("status") == "FAILED" + assert "test_main.cpp" in junit_failed_testcase.get("file") + assert junit_failed_testcase.get("line") == "26" + assert junit_failed_testcase.find("failure").get("message") == "Failure" + assert "Expected equality" in 
junit_failed_testcase.find("failure").text + + # test program arguments + json_output_path = tmp_path / "report.json" + result = clirunner.invoke( + pio_test_cmd, + [ + "-d", + project_dir, + "-e", + "native", + "--json-output-path", + str(json_output_path), + "-a", + "--gtest_filter=-FooTest.Bar", + ], + ) + assert result.exit_code == 0 + # test JSON + json_report = load_json(str(json_output_path)) + assert json_report["testcase_nums"] == 3 + assert json_report["failure_nums"] == 0 + assert json_report["skipped_nums"] == 1 + assert len(json_report["test_suites"]) == 4 diff --git a/tests/commands/test_update.py b/tests/commands/test_update.py index 90cb09c7..6edddfa3 100644 --- a/tests/commands/test_update.py +++ b/tests/commands/test_update.py @@ -19,7 +19,7 @@ from platformio.commands.update import cli as cmd_update def test_update(clirunner, validate_cliresult, isolated_pio_core): matches = ("Platform Manager", "Library Manager") - result = clirunner.invoke(cmd_update, ["--only-check"]) + result = clirunner.invoke(cmd_update) validate_cliresult(result) assert all(m in result.output for m in matches) result = clirunner.invoke(cmd_update) diff --git a/tests/conftest.py b/tests/conftest.py index b3b1bc88..6f4a6088 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -37,8 +37,10 @@ def validate_cliresult(): @pytest.fixture(scope="session") -def clirunner(request): +def clirunner(request, tmpdir_factory): + cache_dir = tmpdir_factory.mktemp(".cache") backup_env_vars = { + "PLATFORMIO_CACHE_DIR": {"new": str(cache_dir)}, "PLATFORMIO_WORKSPACE_DIR": {"new": None}, } for key, item in backup_env_vars.items(): @@ -61,18 +63,28 @@ def clirunner(request): return CliRunner() -@pytest.fixture(scope="module") -def isolated_pio_core(request, tmpdir_factory): +def _isolated_pio_core(request, tmpdir_factory): core_dir = tmpdir_factory.mktemp(".platformio") os.environ["PLATFORMIO_CORE_DIR"] = str(core_dir) def fin(): - del os.environ["PLATFORMIO_CORE_DIR"] + if "PLATFORMIO_CORE_DIR" in os.environ: + del os.environ["PLATFORMIO_CORE_DIR"] request.addfinalizer(fin) return core_dir +@pytest.fixture(scope="module") +def isolated_pio_core(request, tmpdir_factory): + return _isolated_pio_core(request, tmpdir_factory) + + +@pytest.fixture(scope="function") +def func_isolated_pio_core(request, tmpdir_factory): + return _isolated_pio_core(request, tmpdir_factory) + + @pytest.fixture(scope="function") def without_internet(monkeypatch): monkeypatch.setattr(http, "_internet_on", lambda: False) diff --git a/tests/package/test_manager.py b/tests/package/test_manager.py index c82d3c68..896a7231 100644 --- a/tests/package/test_manager.py +++ b/tests/package/test_manager.py @@ -14,14 +14,15 @@ # pylint: disable=unused-argument +import logging import os import time +from pathlib import Path import pytest import semantic_version from platformio import fs, util -from platformio.compat import PY2 from platformio.package.exception import ( MissingPackageManifestError, UnknownPackageError, @@ -37,13 +38,14 @@ def test_download(isolated_pio_core): url = "https://github.com/platformio/platformio-core/archive/v4.3.4.zip" checksum = "69d59642cb91e64344f2cdc1d3b98c5cd57679b5f6db7accc7707bd4c5d9664a" lm = LibraryPackageManager() - archive_path = lm.download(url, checksum, silent=True) + lm.set_log_level(logging.ERROR) + archive_path = lm.download(url, checksum) assert fs.calculate_file_hashsum("sha256", archive_path) == checksum - lm.cleanup_expired_downloads() + lm.cleanup_expired_downloads(time.time()) assert 
os.path.isfile(archive_path) # test outdated downloads lm.set_download_utime(archive_path, time.time() - lm.DOWNLOAD_CACHE_EXPIRE - 1) - lm.cleanup_expired_downloads() + lm.cleanup_expired_downloads(time.time()) assert not os.path.isfile(archive_path) # check that key is deleted from DB with open(lm.get_download_usagedb_path(), encoding="utf8") as fp: @@ -103,7 +105,7 @@ def test_build_legacy_spec(isolated_pio_core, tmpdir_factory): ) assert pm.build_legacy_spec(str(pkg1_dir)) == PackageSpec( name="StreamSpy-0.0.1.tar", - url="https://dl.platformio.org/e8936b7/StreamSpy-0.0.1.tar.gz", + uri="https://dl.platformio.org/e8936b7/StreamSpy-0.0.1.tar.gz", ) # without src manifest @@ -145,17 +147,17 @@ def test_build_metadata(isolated_pio_core, tmpdir_factory): assert metadata.version.build[1] == vcs_revision -@pytest.mark.skipif(PY2, reason="Requires Python 3.5 or higher") -def test_install_from_url(isolated_pio_core, tmpdir_factory): +def test_install_from_uri(isolated_pio_core, tmpdir_factory): tmp_dir = tmpdir_factory.mktemp("tmp") storage_dir = tmpdir_factory.mktemp("storage") lm = LibraryPackageManager(str(storage_dir)) + lm.set_log_level(logging.ERROR) # install from local directory src_dir = tmp_dir.join("local-lib-dir").mkdir() src_dir.join("main.cpp").write("") spec = PackageSpec("file://%s" % src_dir) - pkg = lm.install(spec, silent=True) + pkg = lm.install(spec) assert os.path.isfile(os.path.join(pkg.path, "main.cpp")) manifest = lm.load_manifest(pkg) assert manifest["name"] == "local-lib-dir" @@ -171,7 +173,7 @@ def test_install_from_url(isolated_pio_core, tmpdir_factory): ) tarball_path = PackagePacker(str(src_dir)).pack(str(tmp_dir)) spec = PackageSpec("file://%s" % tarball_path) - pkg = lm.install(spec, silent=True) + pkg = lm.install(spec) assert os.path.isfile(os.path.join(pkg.path, "src", "main.cpp")) assert pkg == lm.get_package(spec) assert spec == pkg.metadata.spec @@ -185,7 +187,7 @@ version = 5.2.7 """ ) spec = PackageSpec("company/wifilib @ ^5") - pkg = lm.install_from_url("file://%s" % src_dir, spec) + pkg = lm.install_from_uri("file://%s" % src_dir, spec) assert str(pkg.metadata.version) == "5.2.7" # check package folder names @@ -198,38 +200,41 @@ version = 5.2.7 def test_install_from_registry(isolated_pio_core, tmpdir_factory): # Libraries lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage"))) + lm.set_log_level(logging.ERROR) # library with dependencies - lm.install("AsyncMqttClient-esphome @ 0.8.6", silent=True) + lm.install("AsyncMqttClient-esphome @ 0.8.6") assert len(lm.get_installed()) == 3 pkg = lm.get_package("AsyncTCP-esphome") assert pkg.metadata.spec.owner == "esphome" assert not lm.get_package("non-existing-package") # mbed library - assert lm.install("wolfSSL", silent=True) + assert lm.install("wolfSSL") assert len(lm.get_installed()) == 4 # case sensitive author name - assert lm.install("DallasTemperature", silent=True) + assert lm.install("DallasTemperature") assert lm.get_package("OneWire").metadata.version.major >= 2 assert len(lm.get_installed()) == 6 # test conflicted names lm = LibraryPackageManager(str(tmpdir_factory.mktemp("conflicted-storage"))) - lm.install("z3t0/IRremote@2.6.1", silent=True) - lm.install("mbed-yuhki50/IRremote", silent=True) + lm.set_log_level(logging.ERROR) + lm.install("z3t0/IRremote@2.6.1") + lm.install("mbed-yuhki50/IRremote") assert len(lm.get_installed()) == 2 # Tools tm = ToolPackageManager(str(tmpdir_factory.mktemp("tool-storage"))) - pkg = tm.install("platformio/tool-stlink @ ~1.10400.0", silent=True) + 
tm.set_log_level(logging.ERROR) + pkg = tm.install("platformio/tool-stlink @ ~1.10400.0") manifest = tm.load_manifest(pkg) assert tm.is_system_compatible(manifest.get("system")) assert util.get_systype() in manifest.get("system", []) # Test unknown with pytest.raises(UnknownPackageError): - tm.install("unknown-package-tool @ 9.1.1", silent=True) + tm.install("unknown-package-tool @ 9.1.1") with pytest.raises(UnknownPackageError): - tm.install("owner/unknown-package-tool", silent=True) + tm.install("owner/unknown-package-tool") def test_install_lib_depndencies(isolated_pio_core, tmpdir_factory): @@ -259,7 +264,8 @@ def test_install_lib_depndencies(isolated_pio_core, tmpdir_factory): ) lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage"))) - lm.install("file://%s" % str(src_dir), silent=True) + lm.set_log_level(logging.ERROR) + lm.install("file://%s" % str(src_dir)) installed = lm.get_installed() assert len(installed) == 4 assert set(["external-repo", "ArduinoJson", "lib-with-deps", "OneWire"]) == set( @@ -269,19 +275,170 @@ def test_install_force(isolated_pio_core, tmpdir_factory): lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage"))) + lm.set_log_level(logging.ERROR) # install #64 ArduinoJson - pkg = lm.install("64 @ ^5", silent=True) + pkg = lm.install("64 @ ^5") assert pkg.metadata.version.major == 5 # try install the latest without specification - pkg = lm.install("64", silent=True) + pkg = lm.install("64") assert pkg.metadata.version.major == 5 assert len(lm.get_installed()) == 1 # re-install the latest - pkg = lm.install(64, silent=True, force=True) + pkg = lm.install(64, force=True) assert len(lm.get_installed()) == 1 assert pkg.metadata.version.major > 5 +def test_symlink(tmp_path: Path): + external_pkg_dir = tmp_path / "External" + external_pkg_dir.mkdir() + (external_pkg_dir / "library.json").write_text( + """ +{ + "name": "External", + "version": "1.0.0" +} +""" + ) + + storage_dir = tmp_path / "storage" + installed_pkg_dir = storage_dir / "installed" + installed_pkg_dir.mkdir(parents=True) + (installed_pkg_dir / "library.json").write_text( + """ +{ + "name": "Installed", + "version": "1.0.0" +} +""" + ) + + spec = "CustomExternal=symlink://%s" % str(external_pkg_dir) + lm = LibraryPackageManager(str(storage_dir)) + lm.set_log_level(logging.ERROR) + pkg = lm.install(spec) + assert os.path.isfile(str(storage_dir / "CustomExternal.pio-link")) + assert pkg.metadata.name == "External" + assert pkg.metadata.version.major == 1 + assert ["External", "Installed"] == [ + pkg.metadata.name for pkg in lm.get_installed() + ] + pkg = lm.get_package("External") + assert Path(pkg.path) == external_pkg_dir + assert pkg.metadata.spec.uri.startswith("symlink://") + assert lm.get_package(spec).metadata.spec.uri.startswith("symlink://") + + # try to update + lm.update(pkg) + + # uninstall + lm.uninstall("External") + assert ["Installed"] == [pkg.metadata.name for pkg in lm.get_installed()] + # ensure original package was not removed + assert external_pkg_dir.is_dir() + + # install again, then remove from disk + assert lm.install("symlink://%s" % str(external_pkg_dir)) + assert os.path.isfile(str(storage_dir / "External.pio-link")) + assert ["External", "Installed"] == [ + pkg.metadata.name for pkg in lm.get_installed() + ] + fs.rmtree(str(external_pkg_dir)) + lm.memcache_reset() + assert ["Installed"] == [pkg.metadata.name for pkg in lm.get_installed()] + + +def test_scripts(isolated_pio_core, tmp_path:
Path): + pkg_dir = tmp_path / "foo" + scripts_dir = pkg_dir / "scripts" + scripts_dir.mkdir(parents=True) + (scripts_dir / "script.py").write_text( + """ +import sys +from pathlib import Path + +action = "postinstall" if len(sys.argv) == 1 else sys.argv[1] +Path("%s.flag" % action).touch() + +if action == "preuninstall": + Path("../%s.flag" % action).touch() +""" + ) + (pkg_dir / "library.json").write_text( + """ +{ + "name": "foo", + "version": "1.0.0", + "scripts": { + "postinstall": "scripts/script.py", + "preuninstall2": ["scripts/script.py", "preuninstall"] + } +} +""" + ) + + storage_dir = tmp_path / "storage" + lm = LibraryPackageManager(str(storage_dir)) + lm.set_log_level(logging.ERROR) + lm.install("file://%s" % str(pkg_dir)) + assert os.path.isfile(os.path.join(lm.get_package("foo").path, "postinstall.flag")) + lm.uninstall("foo") + (storage_dir / "preuninstall.flag").is_file() + + +def test_install_circular_dependencies(tmp_path: Path): + storage_dir = tmp_path / "storage" + # Foo + pkg_dir = storage_dir / "foo" + pkg_dir.mkdir(parents=True) + (pkg_dir / "library.json").write_text( + """ +{ + "name": "Foo", + "version": "1.0.0", + "dependencies": { + "Bar": "*" + } +} +""" + ) + # Bar + pkg_dir = storage_dir / "bar" + pkg_dir.mkdir(parents=True) + (pkg_dir / "library.json").write_text( + """ +{ + "name": "Bar", + "version": "1.0.0", + "dependencies": { + "Foo": "*" + } +} +""" + ) + + lm = LibraryPackageManager(str(storage_dir)) + lm.set_log_level(logging.ERROR) + assert len(lm.get_installed()) == 2 + + # root library + pkg_dir = tmp_path / "root" + pkg_dir.mkdir(parents=True) + (pkg_dir / "library.json").write_text( + """ +{ + "name": "Root", + "version": "1.0.0", + "dependencies": { + "Foo": "^1.0.0", + "Bar": "^1.0.0" + } +} +""" + ) + lm.install("file://%s" % str(pkg_dir)) + + def test_get_installed(isolated_pio_core, tmpdir_factory): storage_dir = tmpdir_factory.mktemp("storage") pm = ToolPackageManager(str(storage_dir)) @@ -364,54 +521,57 @@ def test_uninstall(isolated_pio_core, tmpdir_factory): tmp_dir = tmpdir_factory.mktemp("tmp") storage_dir = tmpdir_factory.mktemp("storage") lm = LibraryPackageManager(str(storage_dir)) + lm.set_log_level(logging.ERROR) # foo @ 1.0.0 pkg_dir = tmp_dir.join("foo").mkdir() pkg_dir.join("library.json").write('{"name": "foo", "version": "1.0.0"}') - foo_1_0_0_pkg = lm.install_from_url("file://%s" % pkg_dir, "foo") + foo_1_0_0_pkg = lm.install_from_uri("file://%s" % pkg_dir, "foo") # foo @ 1.3.0 pkg_dir = tmp_dir.join("foo-1.3.0").mkdir() pkg_dir.join("library.json").write('{"name": "foo", "version": "1.3.0"}') - lm.install_from_url("file://%s" % pkg_dir, "foo") + lm.install_from_uri("file://%s" % pkg_dir, "foo") # bar pkg_dir = tmp_dir.join("bar").mkdir() pkg_dir.join("library.json").write('{"name": "bar", "version": "1.0.0"}') - bar_pkg = lm.install("file://%s" % pkg_dir, silent=True) + bar_pkg = lm.install("file://%s" % pkg_dir) assert len(lm.get_installed()) == 3 assert os.path.isdir(os.path.join(str(storage_dir), "foo")) assert os.path.isdir(os.path.join(str(storage_dir), "foo@1.0.0")) # check detaching - assert lm.uninstall("FOO", silent=True) + assert lm.uninstall("FOO") assert len(lm.get_installed()) == 2 assert os.path.isdir(os.path.join(str(storage_dir), "foo")) assert not os.path.isdir(os.path.join(str(storage_dir), "foo@1.0.0")) # uninstall the rest - assert lm.uninstall(foo_1_0_0_pkg.path, silent=True) - assert lm.uninstall(bar_pkg, silent=True) + assert lm.uninstall(foo_1_0_0_pkg.path) + assert lm.uninstall(bar_pkg) assert 
not lm.get_installed() # test uninstall dependencies - assert lm.install("AsyncMqttClient-esphome @ 0.8.4", silent=True) + assert lm.install("AsyncMqttClient-esphome @ 0.8.4") assert len(lm.get_installed()) == 3 - assert lm.uninstall("AsyncMqttClient-esphome", silent=True, skip_dependencies=True) + assert lm.uninstall("AsyncMqttClient-esphome", skip_dependencies=True) assert len(lm.get_installed()) == 2 lm = LibraryPackageManager(str(storage_dir)) - assert lm.install("AsyncMqttClient-esphome @ 0.8.4", silent=True) - assert lm.uninstall("AsyncMqttClient-esphome", silent=True) + lm.set_log_level(logging.ERROR) + assert lm.install("AsyncMqttClient-esphome @ 0.8.4") + assert lm.uninstall("AsyncMqttClient-esphome") assert not lm.get_installed() def test_registry(isolated_pio_core): lm = LibraryPackageManager() + lm.set_log_level(logging.ERROR) # reveal ID assert lm.reveal_registry_package_id(PackageSpec(id=13)) == 13 - assert lm.reveal_registry_package_id(PackageSpec(name="OneWire"), silent=True) == 1 + assert lm.reveal_registry_package_id(PackageSpec(name="OneWire")) == 1 with pytest.raises(UnknownPackageError): lm.reveal_registry_package_id(PackageSpec(name="/non-existing-package/")) @@ -435,15 +595,16 @@ def test_registry(isolated_pio_core): def test_update_with_metadata(isolated_pio_core, tmpdir_factory): storage_dir = tmpdir_factory.mktemp("storage") lm = LibraryPackageManager(str(storage_dir)) + lm.set_log_level(logging.ERROR) # test non SemVer in registry - pkg = lm.install("adafruit/Adafruit NeoPixel @ <1.9", silent=True) + pkg = lm.install("adafruit/Adafruit NeoPixel @ <1.9") outdated = lm.outdated(pkg) assert str(outdated.current) == "1.8.7" assert outdated.latest > semantic_version.Version("1.10.0") - pkg = lm.install("ArduinoJson @ 5.10.1", silent=True) - # tesy latest + pkg = lm.install("ArduinoJson @ 5.10.1") + # test latest outdated = lm.outdated(pkg) assert str(outdated.current) == "5.10.1" assert outdated.wanted is None @@ -457,14 +618,15 @@ def test_update_with_metadata(isolated_pio_core, tmpdir_factory): assert outdated.latest > semantic_version.Version("6.16.0") # update to the wanted 5.x - new_pkg = lm.update("ArduinoJson@^5", PackageSpec("ArduinoJson@^5"), silent=True) + new_pkg = lm.update("ArduinoJson@^5", PackageSpec("ArduinoJson@^5")) assert str(new_pkg.metadata.version) == "5.13.4" # check that old version is removed assert len(lm.get_installed()) == 2 # update to the latest lm = LibraryPackageManager(str(storage_dir)) - pkg = lm.update("ArduinoJson", silent=True) + lm.set_log_level(logging.ERROR) + pkg = lm.update("ArduinoJson") assert pkg.metadata.version == outdated.latest @@ -485,6 +647,7 @@ def test_update_without_metadata(isolated_pio_core, tmpdir_factory): # update lm = LibraryPackageManager(str(storage_dir)) - new_pkg = lm.update(pkg, silent=True) + lm.set_log_level(logging.ERROR) + new_pkg = lm.update(pkg) assert len(lm.get_installed()) == 4 assert new_pkg.metadata.spec.owner == "ottowinter" diff --git a/tests/package/test_manifest.py b/tests/package/test_manifest.py index a07a035e..c16ce04b 100644 --- a/tests/package/test_manifest.py +++ b/tests/package/test_manifest.py @@ -322,6 +322,9 @@ def test_library_json_schema(): "frameworks": "arduino", "platforms": "*", "license": "MIT", + "scripts": { + "postinstall": "script.py" + }, "examples": [ { "name": "JsonConfigFile", @@ -372,6 +375,7 @@ def test_library_json_schema(): "frameworks": ["arduino"], "platforms": ["*"], "license": "MIT", + "scripts": {"postinstall": "script.py"}, "examples": [ { "name": 
"JsonConfigFile", @@ -426,6 +430,25 @@ def test_library_json_schema(): }, ) + # test multiple licenses + contents = """ +{ + "name": "MultiLicense", + "version": "1.0.0", + "license": "MIT AND (LGPL-2.1-or-later OR BSD-3-Clause)" +} +""" + raw_data = parser.LibraryJsonManifestParser(contents).as_dict() + data = ManifestSchema().load_manifest(raw_data) + assert not jsondiff.diff( + data, + { + "name": "MultiLicense", + "version": "1.0.0", + "license": "MIT AND (LGPL-2.1-or-later OR BSD-3-Clause)", + }, + ) + def test_library_properties_schema(): contents = """ @@ -859,6 +882,11 @@ def test_broken_schemas(): ManifestValidationError, match=("Invalid semantic versioning format") ): ManifestSchema().load_manifest(dict(name="MyPackage", version="broken_version")) + # version with leading zeros + with pytest.raises( + ManifestValidationError, match=("Invalid semantic versioning format") + ): + ManifestSchema().load_manifest(dict(name="MyPackage", version="01.02.00")) # broken value for Nested with pytest.raises(ManifestValidationError, match=r"authors.*Invalid input type"): diff --git a/tests/package/test_meta.py b/tests/package/test_meta.py index 1cda6409..4faabeba 100644 --- a/tests/package/test_meta.py +++ b/tests/package/test_meta.py @@ -82,22 +82,25 @@ def test_spec_requirements(): def test_spec_local_urls(tmpdir_factory): assert PackageSpec("file:///tmp/foo.tar.gz") == PackageSpec( - url="file:///tmp/foo.tar.gz", name="foo" + uri="file:///tmp/foo.tar.gz", name="foo" ) assert PackageSpec("customName=file:///tmp/bar.zip") == PackageSpec( - url="file:///tmp/bar.zip", name="customName" + uri="file:///tmp/bar.zip", name="customName" ) assert PackageSpec("file:///tmp/some-lib/") == PackageSpec( - url="file:///tmp/some-lib/", name="some-lib" + uri="file:///tmp/some-lib/", name="some-lib" + ) + assert PackageSpec("symlink:///tmp/soft-link/") == PackageSpec( + uri="symlink:///tmp/soft-link/", name="soft-link" ) # detached package assert PackageSpec("file:///tmp/some-lib@src-67e1043a673d2") == PackageSpec( - url="file:///tmp/some-lib@src-67e1043a673d2", name="some-lib" + uri="file:///tmp/some-lib@src-67e1043a673d2", name="some-lib" ) # detached folder without scheme pkg_dir = tmpdir_factory.mktemp("storage").join("detached@1.2.3").mkdir() assert PackageSpec(str(pkg_dir)) == PackageSpec( - name="detached", url="file://%s" % pkg_dir + name="detached", uri="file://%s" % pkg_dir ) @@ -105,14 +108,14 @@ def test_spec_external_urls(): assert PackageSpec( "https://github.com/platformio/platformio-core/archive/develop.zip" ) == PackageSpec( - url="https://github.com/platformio/platformio-core/archive/develop.zip", + uri="https://github.com/platformio/platformio-core/archive/develop.zip", name="platformio-core", ) assert PackageSpec( "https://github.com/platformio/platformio-core/archive/develop.zip?param=value" " @ !=2" ) == PackageSpec( - url="https://github.com/platformio/platformio-core/archive/" + uri="https://github.com/platformio/platformio-core/archive/" "develop.zip?param=value", name="platformio-core", requirements="!=2", @@ -125,7 +128,7 @@ def test_spec_external_urls(): assert spec.has_custom_name() assert spec.name == "Custom-Name" assert spec == PackageSpec( - url="https://github.com/platformio/platformio-core/archive/develop.tar.gz", + uri="https://github.com/platformio/platformio-core/archive/develop.tar.gz", name="Custom-Name", requirements="4.4.0", ) @@ -133,40 +136,40 @@ def test_spec_external_urls(): def test_spec_vcs_urls(): assert 
PackageSpec("https://github.com/platformio/platformio-core") == PackageSpec( - name="platformio-core", url="git+https://github.com/platformio/platformio-core" + name="platformio-core", uri="git+https://github.com/platformio/platformio-core" ) assert PackageSpec("https://gitlab.com/username/reponame") == PackageSpec( - name="reponame", url="git+https://gitlab.com/username/reponame" + name="reponame", uri="git+https://gitlab.com/username/reponame" ) assert PackageSpec( "wolfSSL=https://os.mbed.com/users/wolfSSL/code/wolfSSL/" ) == PackageSpec( - name="wolfSSL", url="hg+https://os.mbed.com/users/wolfSSL/code/wolfSSL/" + name="wolfSSL", uri="hg+https://os.mbed.com/users/wolfSSL/code/wolfSSL/" ) assert PackageSpec( "https://github.com/platformio/platformio-core.git#master" ) == PackageSpec( name="platformio-core", - url="git+https://github.com/platformio/platformio-core.git#master", + uri="git+https://github.com/platformio/platformio-core.git#master", ) assert PackageSpec( "core=git+ssh://github.com/platformio/platformio-core.git#v4.4.0@4.4.0" ) == PackageSpec( name="core", - url="git+ssh://github.com/platformio/platformio-core.git#v4.4.0", + uri="git+ssh://github.com/platformio/platformio-core.git#v4.4.0", requirements="4.4.0", ) assert PackageSpec( "username@github.com:platformio/platformio-core.git" ) == PackageSpec( name="platformio-core", - url="git+username@github.com:platformio/platformio-core.git", + uri="git+username@github.com:platformio/platformio-core.git", ) assert PackageSpec( "pkg=git+git@github.com:platformio/platformio-core.git @ ^1.2.3,!=5" ) == PackageSpec( name="pkg", - url="git+git@github.com:platformio/platformio-core.git", + uri="git+git@github.com:platformio/platformio-core.git", requirements="^1.2.3,!=5", ) assert PackageSpec( @@ -176,7 +179,7 @@ def test_spec_vcs_urls(): ) == PackageSpec( owner="platformio", name="external-repo", - url="git+https://github.com/platformio/platformio-core", + uri="git+https://github.com/platformio/platformio-core", ) @@ -188,7 +191,7 @@ def test_spec_as_dict(): "id": None, "name": "foo", "requirements": "1.2.3", - "url": None, + "uri": None, }, ) assert not jsondiff.diff( @@ -201,7 +204,7 @@ def test_spec_as_dict(): "id": None, "name": "platformio-core", "requirements": "!=2", - "url": "https://github.com/platformio/platformio-core/archive/develop.zip?param=value", + "uri": "https://github.com/platformio/platformio-core/archive/develop.zip?param=value", }, ) @@ -255,7 +258,7 @@ def test_metadata_as_dict(): "id": None, "name": "toolchain", "requirements": "~2.0.0", - "url": None, + "uri": None, }, }, ) diff --git a/tests/package/test_pack.py b/tests/package/test_pack.py index cc898f9d..558591b3 100644 --- a/tests/package/test_pack.py +++ b/tests/package/test_pack.py @@ -27,8 +27,8 @@ from platformio.package.pack import PackagePacker def test_base(tmpdir_factory): pkg_dir = tmpdir_factory.mktemp("package") pkg_dir.join(".git").mkdir().join("file").write("") - pkg_dir.join(".gitignore").write("tests") - pkg_dir.join("._ignored").write("") + pkg_dir.join(".gitignore").write("") + pkg_dir.join("._hidden_file").write("") pkg_dir.join("main.cpp").write("#include ") p = PackagePacker(str(pkg_dir)) # test missed manifest @@ -95,6 +95,42 @@ def test_filters(tmpdir_factory): ) +def test_gitgnore_filters(tmpdir_factory): + pkg_dir = tmpdir_factory.mktemp("package") + pkg_dir.join(".git").mkdir().join("file").write("") + pkg_dir.join(".gitignore").write( + """ +# comment + +gi_file +gi_folder +gi_folder_* + +**/main_nested.h + +gi_keep_file 
+!gi_keep_file +LICENSE +""" + ) + pkg_dir.join("LICENSE").write("") + pkg_dir.join("gi_keep_file").write("") + pkg_dir.join("gi_file").write("") + pkg_dir.mkdir("gi_folder").join("main.h").write("#ifndef") + pkg_dir.mkdir("gi_folder_name").join("main.h").write("#ifndef") + pkg_dir.mkdir("gi_nested_folder").mkdir("a").mkdir("b").join("main_nested.h").write( + "#ifndef" + ) + pkg_dir.join("library.json").write('{"name": "foo", "version": "1.0.0"}') + p = PackagePacker(str(pkg_dir)) + with fs.cd(str(pkg_dir)): + p.pack() + with tarfile.open(os.path.join(str(pkg_dir), "foo-1.0.0.tar.gz"), "r:gz") as tar: + assert set(tar.getnames()) == set( + ["library.json", "LICENSE", ".gitignore", "gi_keep_file"] + ) + + def test_symlinks(tmpdir_factory): # Windows does not support symbolic links if IS_WINDOWS: diff --git a/tests/project/__init__.py b/tests/project/__init__.py new file mode 100644 index 00000000..b0514903 --- /dev/null +++ b/tests/project/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/test_projectconf.py b/tests/project/test_config.py similarity index 92% rename from tests/test_projectconf.py rename to tests/project/test_config.py index 0fada8c3..25b7f545 100644 --- a/tests/test_projectconf.py +++ b/tests/project/test_config.py @@ -14,13 +14,15 @@ # pylint: disable=redefined-outer-name +import configparser import os import sys +from pathlib import Path import pytest from platformio import fs -from platformio.project.config import ConfigParser, ProjectConfig +from platformio.project.config import ProjectConfig from platformio.project.exception import InvalidProjectConfError, UnknownEnvNamesError BASE_CONFIG = """ @@ -154,7 +156,7 @@ def test_defaults(config): def test_sections(config): - with pytest.raises(ConfigParser.NoSectionError): + with pytest.raises(configparser.NoSectionError): config.getraw("unknown_section", "unknown_option") assert config.sections() == [ @@ -181,6 +183,7 @@ def test_envs(config): "extra_2", ] assert config.default_envs() == ["base", "extra_2"] + assert config.get_default_env() == "base" def test_options(config): @@ -276,10 +279,10 @@ def test_sysenv_options(config): def test_getraw_value(config): # unknown option - with pytest.raises(ConfigParser.NoOptionError): + with pytest.raises(configparser.NoOptionError): config.getraw("custom", "unknown_option") # unknown option even if exists in [env] - with pytest.raises(ConfigParser.NoOptionError): + with pytest.raises(configparser.NoOptionError): config.getraw("platformio", "monitor_speed") # default @@ -308,6 +311,9 @@ def test_getraw_value(config): ) assert config.getraw("platformio", "build_dir") == "~/tmp/pio-$PROJECT_HASH" + # renamed option + assert config.getraw("env:base", "debug_load_cmd") == ["load"] + def test_get_value(config): assert config.get("custom", "debug_flags") == "-D DEBUG=1" @@ -341,6 +347,7 @@ def test_get_value(config): assert config.get("platformio", "src_dir") == os.path.abspath( os.path.join(os.getcwd(), 
"source") ) + assert "$PROJECT_HASH" not in config.get("platformio", "build_dir") def test_items(config): @@ -577,3 +584,43 @@ core_dir = ~/.pio fs.rmtree(win_core_root_dir) except PermissionError: pass + + +def test_this(tmp_path: Path): + project_conf = tmp_path / "platformio.ini" + project_conf.write_text( + """ +[common] +board = uno + +[env:myenv] +extends = common +build_flags = -D${this.__env__} +custom_option = ${this.board} + """ + ) + config = ProjectConfig(str(project_conf)) + assert config.get("env:myenv", "custom_option") == "uno" + assert config.get("env:myenv", "build_flags") == ["-Dmyenv"] + + +def test_nested_interpolation(tmp_path: Path): + project_conf = tmp_path / "platformio.ini" + project_conf.write_text( + """ +[platformio] +build_dir = ~/tmp/pio-$PROJECT_HASH + +[env:myenv] +test_testing_command = + ${platformio.packages_dir}/tool-simavr/bin/simavr + -m + atmega328p + -f + 16000000L + ${platformio.build_dir}/${this.__env__}/firmware.elf + """ + ) + config = ProjectConfig(str(project_conf)) + testing_command = config.get("env:myenv", "test_testing_command") + assert "$" not in " ".join(testing_command) diff --git a/tests/project/test_savedeps.py b/tests/project/test_savedeps.py new file mode 100644 index 00000000..9740ce71 --- /dev/null +++ b/tests/project/test_savedeps.py @@ -0,0 +1,224 @@ +# Copyright (c) 2014-present PlatformIO +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from platformio.package.meta import PackageSpec
+from platformio.project.config import ProjectConfig
+from platformio.project.savedeps import save_project_dependencies
+
+PROJECT_CONFIG_TPL = """
+[env]
+board = uno
+framework = arduino
+lib_deps =
+    SPI
+platform_packages =
+    platformio/tool-jlink@^1.75001.0
+
+[env:bare]
+
+[env:release]
+platform = platformio/espressif32
+lib_deps =
+    milesburton/DallasTemperature@^3.8
+
+[env:debug]
+platform = platformio/espressif32@^3.4.0
+lib_deps =
+    ${env.lib_deps}
+    milesburton/DallasTemperature@^3.9.1
+    bblanchon/ArduinoJson
+platform_packages =
+    ${env.platform_packages}
+    platformio/framework-arduinoespressif32 @ https://github.com/espressif/arduino-esp32.git
+"""
+
+
+def test_save_libraries(tmp_path):
+    project_dir = tmp_path / "project"
+    project_dir.mkdir()
+    (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL)
+    specs = [
+        PackageSpec("milesburton/DallasTemperature@^3.9"),
+        PackageSpec("adafruit/Adafruit GPS Library@^1.6.0"),
+        PackageSpec("https://github.com/nanopb/nanopb.git"),
+    ]
+
+    # add to the specified environment
+    save_project_dependencies(
+        str(project_dir), specs, scope="lib_deps", action="add", environments=["debug"]
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:debug", "lib_deps") == [
+        "SPI",
+        "bblanchon/ArduinoJson",
+        "milesburton/DallasTemperature@^3.9",
+        "adafruit/Adafruit GPS Library@^1.6.0",
+        "https://github.com/nanopb/nanopb.git",
+    ]
+    assert config.get("env:bare", "lib_deps") == ["SPI"]
+    assert config.get("env:release", "lib_deps") == [
+        "milesburton/DallasTemperature@^3.8"
+    ]
+
+    # add to all environments
+    save_project_dependencies(str(project_dir), specs, scope="lib_deps", action="add")
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:debug", "lib_deps") == [
+        "SPI",
+        "bblanchon/ArduinoJson",
+        "milesburton/DallasTemperature@^3.9",
+        "adafruit/Adafruit GPS Library@^1.6.0",
+        "https://github.com/nanopb/nanopb.git",
+    ]
+    assert config.get("env:bare", "lib_deps") == [
+        "milesburton/DallasTemperature@^3.9",
+        "adafruit/Adafruit GPS Library@^1.6.0",
+        "https://github.com/nanopb/nanopb.git",
+    ]
+    assert config.get("env:release", "lib_deps") == [
+        "milesburton/DallasTemperature@^3.9",
+        "adafruit/Adafruit GPS Library@^1.6.0",
+        "https://github.com/nanopb/nanopb.git",
+    ]
+
+    # remove deps from env
+    save_project_dependencies(
+        str(project_dir),
+        [PackageSpec("milesburton/DallasTemperature")],
+        scope="lib_deps",
+        action="remove",
+        environments=["release"],
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:release", "lib_deps") == [
+        "adafruit/Adafruit GPS Library@^1.6.0",
+        "https://github.com/nanopb/nanopb.git",
+    ]
+    # invalid requirements
+    save_project_dependencies(
+        str(project_dir),
+        [PackageSpec("adafruit/Adafruit GPS Library@^9.9.9")],
+        scope="lib_deps",
+        action="remove",
+        environments=["release"],
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:release", "lib_deps") == [
+        "https://github.com/nanopb/nanopb.git",
+    ]
+
+    # remove deps from all envs
+    save_project_dependencies(
+        str(project_dir), specs, scope="lib_deps", action="remove"
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:debug", "lib_deps") == [
+        "SPI",
+        "bblanchon/ArduinoJson",
+    ]
+    assert config.get("env:bare", "lib_deps") == ["SPI"]
+    assert config.get("env:release", "lib_deps") == ["SPI"]
+
+
+def test_save_tools(tmp_path):
+    project_dir = tmp_path / "project"
+    project_dir.mkdir()
+    (project_dir / "platformio.ini").write_text(PROJECT_CONFIG_TPL)
+    specs = [
+        PackageSpec("platformio/framework-espidf@^2"),
+        PackageSpec("platformio/tool-esptoolpy"),
+    ]
+
+    # add to the specified environment
+    save_project_dependencies(
+        str(project_dir),
+        specs,
+        scope="platform_packages",
+        action="add",
+        environments=["debug"],
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:debug", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0",
+        "platformio/framework-arduinoespressif32 @ https://github.com/espressif/arduino-esp32.git",
+        "platformio/framework-espidf@^2",
+        "platformio/tool-esptoolpy",
+    ]
+    assert config.get("env:bare", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0"
+    ]
+    assert config.get("env:release", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0"
+    ]
+
+    # add to all environments
+    save_project_dependencies(
+        str(project_dir), specs, scope="platform_packages", action="add"
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:debug", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0",
+        "platformio/framework-arduinoespressif32 @ https://github.com/espressif/arduino-esp32.git",
+        "platformio/framework-espidf@^2",
+        "platformio/tool-esptoolpy",
+    ]
+    assert config.get("env:bare", "platform_packages") == [
+        "platformio/framework-espidf@^2",
+        "platformio/tool-esptoolpy",
+    ]
+    assert config.get("env:release", "platform_packages") == [
+        "platformio/framework-espidf@^2",
+        "platformio/tool-esptoolpy",
+    ]
+
+    # remove deps from env
+    save_project_dependencies(
+        str(project_dir),
+        [PackageSpec("platformio/framework-espidf")],
+        scope="platform_packages",
+        action="remove",
+        environments=["release"],
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:release", "platform_packages") == [
+        "platformio/tool-esptoolpy",
+    ]
+    # invalid requirements
+    save_project_dependencies(
+        str(project_dir),
+        [PackageSpec("platformio/tool-esptoolpy@9.9.9")],
+        scope="platform_packages",
+        action="remove",
+        environments=["release"],
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:release", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0",
+    ]
+
+    # remove deps from all envs
+    save_project_dependencies(
+        str(project_dir), specs, scope="platform_packages", action="remove"
+    )
+    config = ProjectConfig.get_instance(str(project_dir / "platformio.ini"))
+    assert config.get("env:debug", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0",
+        "platformio/framework-arduinoespressif32 @ https://github.com/espressif/arduino-esp32.git",
+    ]
+    assert config.get("env:bare", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0"
+    ]
+    assert config.get("env:release", "platform_packages") == [
+        "platformio/tool-jlink@^1.75001.0"
+    ]
diff --git a/tests/test_examples.py b/tests/test_examples.py
index 0be516eb..27164fd7 100644
--- a/tests/test_examples.py
+++ b/tests/test_examples.py
@@ -19,7 +19,6 @@ from glob import glob
 import pytest
 
 from platformio import fs, proc
-from platformio.compat import PY2
 from platformio.package.manager.platform import PlatformPackageManager
 from platformio.platform.factory import PlatformFactory
 from
platformio.project.config import ProjectConfig @@ -48,8 +47,6 @@ def pytest_generate_tests(metafunc): for root, _, files in os.walk(examples_dir): if "platformio.ini" not in files or ".skiptest" in files: continue - if "zephyr-" in root and PY2: - continue group = os.path.basename(root) if "-" in group: group = group.split("-", 1)[0] diff --git a/tests/test_maintenance.py b/tests/test_maintenance.py index 0d863b0d..8a403ae5 100644 --- a/tests/test_maintenance.py +++ b/tests/test_maintenance.py @@ -14,9 +14,6 @@ # pylint: disable=unused-argument -import json -import os -import re from time import time from platformio import app, maintenance @@ -51,103 +48,3 @@ def test_check_pio_upgrade(clirunner, isolated_pio_core, validate_cliresult): # restore original version _patch_pio_version(origin_version) - - -def test_check_lib_updates(clirunner, isolated_pio_core, validate_cliresult): - # install obsolete library - result = clirunner.invoke(cli_pio, ["lib", "-g", "install", "ArduinoJson@<6.13"]) - validate_cliresult(result) - - # reset check time - interval = int(app.get_setting("check_libraries_interval")) * 3600 * 24 - last_check = {"libraries_update": time() - interval - 1} - app.set_state_item("last_check", last_check) - - result = clirunner.invoke(cli_pio, ["lib", "-g", "list"]) - validate_cliresult(result) - assert "There are the new updates for libraries (ArduinoJson)" in result.output - - -def test_check_and_update_libraries(clirunner, isolated_pio_core, validate_cliresult): - # enable library auto-updates - result = clirunner.invoke( - cli_pio, ["settings", "set", "auto_update_libraries", "Yes"] - ) - - # reset check time - interval = int(app.get_setting("check_libraries_interval")) * 3600 * 24 - last_check = {"libraries_update": time() - interval - 1} - app.set_state_item("last_check", last_check) - - # fetch installed version - result = clirunner.invoke(cli_pio, ["lib", "-g", "list", "--json-output"]) - validate_cliresult(result) - prev_data = json.loads(result.output) - assert len(prev_data) == 1 - - # initiate auto-updating - result = clirunner.invoke(cli_pio, ["lib", "-g", "show", "ArduinoJson"]) - validate_cliresult(result) - assert "There are the new updates for libraries (ArduinoJson)" in result.output - assert "Please wait while updating libraries" in result.output - assert re.search( - r"Updating bblanchon/ArduinoJson\s+6\.12\.0\s+\[Updating to [\d\.]+\]", - result.output, - ) - - # check updated version - result = clirunner.invoke(cli_pio, ["lib", "-g", "list", "--json-output"]) - validate_cliresult(result) - assert prev_data[0]["version"] != json.loads(result.output)[0]["version"] - - -def test_check_platform_updates(clirunner, isolated_pio_core, validate_cliresult): - # install obsolete platform - result = clirunner.invoke(cli_pio, ["platform", "install", "native"]) - validate_cliresult(result) - os.remove(str(isolated_pio_core.join("platforms", "native", ".piopm"))) - manifest_path = isolated_pio_core.join("platforms", "native", "platform.json") - manifest = json.loads(manifest_path.read()) - manifest["version"] = "0.0.0" - manifest_path.write(json.dumps(manifest)) - - # reset check time - interval = int(app.get_setting("check_platforms_interval")) * 3600 * 24 - last_check = {"platforms_update": time() - interval - 1} - app.set_state_item("last_check", last_check) - - result = clirunner.invoke(cli_pio, ["platform", "list"]) - validate_cliresult(result) - assert "There are the new updates for platforms (native)" in result.output - - -def 
test_check_and_update_platforms(clirunner, isolated_pio_core, validate_cliresult): - # enable library auto-updates - result = clirunner.invoke( - cli_pio, ["settings", "set", "auto_update_platforms", "Yes"] - ) - - # reset check time - interval = int(app.get_setting("check_platforms_interval")) * 3600 * 24 - last_check = {"platforms_update": time() - interval - 1} - app.set_state_item("last_check", last_check) - - # fetch installed version - result = clirunner.invoke(cli_pio, ["platform", "list", "--json-output"]) - validate_cliresult(result) - prev_data = json.loads(result.output) - assert len(prev_data) == 1 - - # initiate auto-updating - result = clirunner.invoke(cli_pio, ["platform", "show", "native"]) - validate_cliresult(result) - assert "There are the new updates for platforms (native)" in result.output - assert "Please wait while updating platforms" in result.output - assert re.search( - r"Updating native\s+0.0.0\s+\[Updating to [\d\.]+\]", result.output - ) - - # check updated version - result = clirunner.invoke(cli_pio, ["platform", "list", "--json-output"]) - validate_cliresult(result) - assert prev_data[0]["version"] != json.loads(result.output)[0]["version"]