Mirror of https://github.com/platformio/platformio-core.git (synced 2025-09-28 14:50:56 +02:00)

Commit: Merge branch 'release/v5.2.0'
.github/workflows/core.yml (vendored) — 1 change

@@ -42,3 +42,4 @@ jobs:
           job_name: '*Core*'
           commit: true
           url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
+          token: ${{ secrets.SLACK_GITHUB_TOKEN }}

.github/workflows/docs.yml (vendored) — 79 changes

@@ -4,13 +4,14 @@ on: [push, pull_request]

 jobs:
   build:
+    name: Build Docs
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
         with:
           submodules: "recursive"
       - name: Set up Python
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v2
         with:
           python-version: 3.7
       - name: Install dependencies

@@ -30,3 +31,79 @@ jobs:
           job_name: '*Docs*'
           commit: true
           url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
+          token: ${{ secrets.SLACK_GITHUB_TOKEN }}
+
+      - name: Preserve Docs
+        if: ${{ github.event_name == 'push' }}
+        run: |
+          tar -czvf docs.tar.gz -C docs/_build html rtdpage
+
+      - name: Save artifact
+        if: ${{ github.event_name == 'push' }}
+        uses: actions/upload-artifact@v2
+        with:
+          name: docs
+          path: ./docs.tar.gz
+
+  deploy:
+    name: Deploy Docs
+    needs: build
+    runs-on: ubuntu-latest
+    env:
+      DOCS_REPO: platformio/platformio-docs
+      DOCS_DIR: platformio-docs
+      LATEST_DOCS_DIR: latest-docs
+      RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
+    if: ${{ github.event_name == 'push' }}
+    steps:
+      - name: Download artifact
+        uses: actions/download-artifact@v2
+        with:
+          name: docs
+      - name: Unpack artifact
+        run: |
+          mkdir ./${{ env.LATEST_DOCS_DIR }}
+          tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
+      - name: Delete Artifact
+        uses: geekyeggo/delete-artifact@v1
+        with:
+          name: docs
+      - name: Select Docs type
+        id: get-destination-dir
+        run: |
+          if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
+            echo "::set-output name=dst_dir::stable"
+          else
+            echo "::set-output name=dst_dir::latest"
+          fi
+      - name: Checkout latest Docs
+        continue-on-error: true
+        uses: actions/checkout@v2
+        with:
+          repository: ${{ env.DOCS_REPO }}
+          path: ${{ env.DOCS_DIR }}
+          ref: gh-pages
+      - name: Synchronize Docs
+        run: |
+          rm -rf ${{ env.DOCS_DIR }}/.git
+          rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
+          mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
+          cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
+          if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
+            rm -rf ${{ env.DOCS_DIR }}/page
+            mkdir -p ${{ env.DOCS_DIR }}/page
+            cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
+          fi
+      - name: Validate Docs
+        run: |
+          if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
+            echo "Docs folder is empty. Aborting!"
+            exit 1
+          fi
+      - name: Deploy to Github Pages
+        uses: peaceiris/actions-gh-pages@v3
+        with:
+          personal_token: ${{ secrets.PERSONAL_TOKEN }}
+          external_repository: ${{ env.DOCS_REPO }}
+          publish_dir: ./${{ env.DOCS_DIR }}
+          commit_message: Sync Docs

.github/workflows/examples.yml (vendored) — 1 change

@@ -65,3 +65,4 @@ jobs:
           job_name: '*Examples*'
           commit: true
           url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
+          token: ${{ secrets.SLACK_GITHUB_TOKEN }}

@@ -15,6 +15,7 @@ disable=
     useless-object-inheritance,
     useless-import-alias,
     bad-option-value,
+    consider-using-dict-items,

     ; PY2 Compat
     super-with-arguments,

HISTORY.rst — 54 changes

@@ -8,6 +8,50 @@ PlatformIO Core 5

 **A professional collaborative platform for embedded development**

+5.2.0 (2021-09-13)
+~~~~~~~~~~~~~~~~~~
+
+* **PlatformIO Debugging**
+
+  - Boosted `PlatformIO Debugging <https://docs.platformio.org/page/plus/debugging.html>`__ performance thanks to migrating the codebase to the pure Python 3 Asynchronous I/O stack
+  - `Debug unit tests <https://docs.platformio.org/page/plus/debugging.html#debug-unit-tests>`__ created with `PlatformIO Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html>`__ solution (`issue #948 <https://github.com/platformio/platformio-core/issues/948>`_)
+  - Debug native (desktop) applications on a host machine (`issue #980 <https://github.com/platformio/platformio-core/issues/980>`_)
+  - Support debugging on Windows using Windows CMD/CLI (`pio debug <https://docs.platformio.org/page/core/userguide/cmd_debug.html>`__) (`issue #3793 <https://github.com/platformio/platformio-core/issues/3793>`_)
+  - Configure a custom pattern to determine when debugging server is started with a new `debug_server_ready_pattern <https://docs.platformio.org/page/projectconf/section_env_debug.html#debug-server-ready-pattern>`__ option
+  - Fixed an issue with silent hanging when a custom debug server is not found (`issue #3756 <https://github.com/platformio/platformio-core/issues/3756>`_)
+
+* **Package Management**
+
+  - Improved a package publishing process:
+
+    * Show package details
+    * Check for conflicting names in the PlatformIO Trusted Registry
+    * Check for duplicates and used version
+    * Validate package manifest
+
+  - Added a new option ``--non-interactive`` to `pio package publish <https://docs.platformio.org/page/core/userguide/package/cmd_publish.html>`__ command
+
+* **Build System**
+
+  - Process "precompiled" and "ldflags" properties of the "library.properties" manifest (`issue #3994 <https://github.com/platformio/platformio-core/issues/3994>`_)
+  - Upgraded build engine to the SCons 4.2 (`release notes <https://github.com/SCons/scons/blob/rel_4.2.0/CHANGES.txt>`__)
+  - Fixed an issue with broken binary file extension when a custom ``PROGNAME`` contains dot symbols (`issue #3906 <https://github.com/platformio/platformio-core/issues/3906>`_)
+  - Fixed an issue when PlatformIO archives a library that does not contain C/C++ source files (`issue #4019 <https://github.com/platformio/platformio-core/issues/4019>`_)
+
+* **Static Code Analysis**
+
+  - Updated analysis tools:
+
+    * `Clang-Tidy <https://docs.platformio.org/page/plus/check-tools/clang-tidy.html>`__ v12.0.1 with new modules and extended checks list
+    * `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.5.0 with improved code analysis and MISRA improvements
+    * `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.14 with support for intermodular analysis, improved MISRA support and new diagnostics
+
+* **Miscellaneous**
+
+  - Ensure that a serial port is ready before running unit tests on a remote target (`issue #3742 <https://github.com/platformio/platformio-core/issues/3742>`_)
+  - Fixed an error "Unknown development platform" when running unit tests on a clean machine (`issue #3901 <https://github.com/platformio/platformio-core/issues/3901>`_)
+  - Fixed an issue when "main.cpp" was generated for a new project for 8-bit development platforms (`issue #3872 <https://github.com/platformio/platformio-core/issues/3872>`_)
+
 5.1.1 (2021-03-17)
 ~~~~~~~~~~~~~~~~~~

@@ -199,24 +243,24 @@ Please check `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/

 PlatformIO Core 4
 -----------------

-See `PlatformIO Core 4.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-4>`__.
+See `PlatformIO Core 4.0 history <https://github.com/platformio/platformio-core/blob/v4.3.4/HISTORY.rst>`__.

 PlatformIO Core 3
 -----------------

-See `PlatformIO Core 3.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-3>`__.
+See `PlatformIO Core 3.0 history <https://github.com/platformio/platformio-core/blob/v3.6.7/HISTORY.rst>`__.

 PlatformIO Core 2
 -----------------

-See `PlatformIO Core 2.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-2>`__.
+See `PlatformIO Core 2.0 history <https://github.com/platformio/platformio-core/blob/v2.11.2/HISTORY.rst>`__.

 PlatformIO Core 1
 -----------------

-See `PlatformIO Core 1.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-1>`__.
+See `PlatformIO Core 1.0 history <https://github.com/platformio/platformio-core/blob/v1.5.0/HISTORY.rst>`__.

 PlatformIO Core Preview
 -----------------------

-See `PlatformIO Core Preview history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-preview>`__.
+See `PlatformIO Core Preview history <https://github.com/platformio/platformio-core/blob/v0.10.2/HISTORY.rst>`__.

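The changelog above introduces a new `debug_server_ready_pattern` project option and a ``--non-interactive`` flag for `pio package publish`. A minimal sketch of how the debug option might be set in platformio.ini — the environment name, platform, and pattern are illustrative placeholders, not values taken from this commit:

  ; hypothetical example; environment, platform and pattern are placeholders
  [env:my_board]
  platform = ststm32
  debug_tool = custom
  debug_server_ready_pattern = Listening on port 3333

On the command line, the new publishing flag is used as `pio package publish --non-interactive` to skip the interactive confirmation steps of the improved publishing flow.
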
Makefile — 6 changes

@@ -1,14 +1,14 @@
 lint:
-	pylint -j 6 --rcfile=./.pylintrc ./platformio
 	pylint -j 6 --rcfile=./.pylintrc ./tests
+	pylint -j 6 --rcfile=./.pylintrc ./platformio

 isort:
 	isort ./platformio
 	isort ./tests

 format:
-	black --target-version py27 ./platformio
+	black ./platformio
-	black --target-version py27 ./tests
+	black ./tests

 test:
 	py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

docs — Submodule updated: 3293903cac...c9d2ef9abe
examples — Submodule updated: a0631a8b07...b4be3d3fa4

@@ -14,7 +14,7 @@

 import sys

-VERSION = (5, 1, 1)
+VERSION = (5, 2, 0)
 __version__ = ".".join([str(s) for s in VERSION])

 __title__ = "platformio"

@@ -50,10 +50,10 @@ __core_packages__ = {
     "contrib-piohome": "~3.3.4",
     "contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
     "tool-unity": "~1.20500.0",
-    "tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40100.2",
+    "tool-scons": "~4.40200.0",
-    "tool-cppcheck": "~1.230.0",
+    "tool-cppcheck": "~1.250.0",
-    "tool-clangtidy": "~1.100000.0",
+    "tool-clangtidy": "~1.120001.0",
-    "tool-pvs-studio": "~7.11.0",
+    "tool-pvs-studio": "~7.14.0",
 }

 __check_internet_hosts__ = [

@@ -12,15 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+# pylint: disable=import-outside-toplevel
+
 import os
 import sys
 from traceback import format_exc

 import click

-from platformio import __version__, exception, maintenance, util
+from platformio import __version__, exception
 from platformio.commands import PlatformioCLI
-from platformio.compat import CYGWIN
+from platformio.compat import IS_CYGWIN, ensure_python3

 try:
     import click_completion  # pylint: disable=import-error

@@ -60,18 +62,21 @@ def cli(ctx, force, caller, no_ansi):
     except:  # pylint: disable=bare-except
         pass

+    from platformio import maintenance
+
     maintenance.on_platformio_start(ctx, force, caller)


 @cli.resultcallback()
 @click.pass_context
 def process_result(ctx, result, *_, **__):
+    from platformio import maintenance
+
     maintenance.on_platformio_end(ctx, result)


-@util.memoized()
 def configure():
-    if CYGWIN:
+    if IS_CYGWIN:
         raise exception.CygwinEnvDetected()

     # https://urllib3.readthedocs.org

@@ -105,6 +110,7 @@ def main(argv=None):
         assert isinstance(argv, list)
         sys.argv = argv
     try:
+        ensure_python3(raise_exception=True)
         configure()
         cli()  # pylint: disable=no-value-for-parameter
     except SystemExit as e:

@@ -112,6 +118,9 @@ def main(argv=None):
         exit_code = int(e.code)
     except Exception as e:  # pylint: disable=broad-except
         if not isinstance(e, exception.ReturnErrorCode):
+            if sys.version_info.major != 2:
+                from platformio import maintenance
+
             maintenance.on_platformio_exception(e)
             error_str = "Error: "
         if isinstance(e, exception.PlatformioException):

@@ -24,7 +24,7 @@ import uuid
 from os.path import dirname, isdir, isfile, join, realpath

 from platformio import __version__, exception, fs, proc
-from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
+from platformio.compat import IS_WINDOWS, hashlib_encode_data
 from platformio.package.lockfile import LockFile
 from platformio.project.helpers import get_default_projects_dir, get_project_core_dir

@@ -114,8 +114,8 @@ class State(object):
     def __exit__(self, type_, value, traceback):
         if self.modified:
             try:
-                with open(self.path, "w") as fp:
+                with open(self.path, mode="w", encoding="utf8") as fp:
-                    fp.write(dump_json_to_unicode(self._storage))
+                    fp.write(json.dumps(self._storage))
             except IOError:
                 raise exception.HomeDirPermissionsError(get_project_core_dir())
         self._unlock_state_file()

@@ -277,7 +277,7 @@ def get_cid():
     uid = uuid.getnode()
     cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
     cid = str(cid)
-    if WINDOWS or os.getuid() > 0:  # pylint: disable=no-member
+    if IS_WINDOWS or os.getuid() > 0:  # pylint: disable=no-member
         set_state_item("cid", cid)
     return cid

@@ -12,9 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import json
+import os
 import sys
-from os import environ, makedirs
-from os.path import isdir, join
 from time import time

 import click

@@ -29,7 +29,6 @@ from SCons.Script import Import  # pylint: disable=import-error
 from SCons.Script import Variables  # pylint: disable=import-error

 from platformio import compat, fs
-from platformio.compat import dump_json_to_unicode
 from platformio.platform.base import PlatformBase
 from platformio.proc import get_pythonexe_path
 from platformio.project.helpers import get_project_dir

@@ -65,18 +64,18 @@ DEFAULT_ENV_OPTIONS = dict(
         "pioide",
         "piosize",
     ],
-    toolpath=[join(fs.get_source_dir(), "builder", "tools")],
+    toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
     variables=clivars,
     # Propagating External Environment
-    ENV=environ,
+    ENV=os.environ,
     UNIX_TIME=int(time()),
-    BUILD_DIR=join("$PROJECT_BUILD_DIR", "$PIOENV"),
+    BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
-    BUILD_SRC_DIR=join("$BUILD_DIR", "src"),
+    BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
-    BUILD_TEST_DIR=join("$BUILD_DIR", "test"),
+    BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
-    COMPILATIONDB_PATH=join("$BUILD_DIR", "compile_commands.json"),
+    COMPILATIONDB_PATH=os.path.join("$BUILD_DIR", "compile_commands.json"),
     LIBPATH=["$BUILD_DIR"],
     PROGNAME="program",
-    PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
+    PROG_PATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
     PYTHONEXE=get_pythonexe_path(),
     IDE_EXTRA_DATA={},
 )

@@ -124,26 +123,26 @@ env.Replace(
     BUILD_CACHE_DIR=config.get_optional_dir("build_cache"),
     LIBSOURCE_DIRS=[
         config.get_optional_dir("lib"),
-        join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
+        os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
         config.get_optional_dir("globallib"),
     ],
 )

 if (
-    compat.WINDOWS
+    compat.IS_WINDOWS
     and sys.version_info >= (3, 8)
     and env["PROJECT_DIR"].startswith("\\\\")
 ):
     click.secho(
         "There is a known issue with Python 3.8+ and mapped network drives on "
-        "Windows.\nPlease downgrade Python to the latest 3.7. More details at:\n"
+        "Windows.\nSee a solution at:\n"
         "https://github.com/platformio/platformio-core/issues/3417",
         fg="yellow",
     )

 if env.subst("$BUILD_CACHE_DIR"):
-    if not isdir(env.subst("$BUILD_CACHE_DIR")):
+    if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
-        makedirs(env.subst("$BUILD_CACHE_DIR"))
+        os.makedirs(env.subst("$BUILD_CACHE_DIR"))
     env.CacheDir("$BUILD_CACHE_DIR")

 if int(ARGUMENTS.get("ISATTY", 0)):

@@ -160,15 +159,17 @@ elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
 if "compiledb" in COMMAND_LINE_TARGETS:
     env.Tool("compilation_db")

-if not isdir(env.subst("$BUILD_DIR")):
+if not os.path.isdir(env.subst("$BUILD_DIR")):
-    makedirs(env.subst("$BUILD_DIR"))
+    os.makedirs(env.subst("$BUILD_DIR"))

 env.LoadProjectOptions()
 env.LoadPioPlatform()

 env.SConscriptChdir(0)
 env.SConsignFile(
-    join("$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]))
+    os.path.join(
+        "$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1])
+    )
 )

 for item in env.GetExtraScripts("pre"):

@@ -209,7 +210,7 @@ env.AddPreAction(
     ),
 )

-AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
+AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
 AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))

 ##############################################################################

@@ -218,17 +219,20 @@ if "envdump" in COMMAND_LINE_TARGETS:
     click.echo(env.Dump())
     env.Exit(0)

-if "idedata" in COMMAND_LINE_TARGETS:
+if set(["_idedata", "idedata"]) & set(COMMAND_LINE_TARGETS):
     try:
         Import("projenv")
     except:  # pylint: disable=bare-except
         projenv = env
-    click.echo(
-        "\n%s\n"
-        % dump_json_to_unicode(
-            projenv.DumpIDEData(env)  # pylint: disable=undefined-variable
-        )
-    )
+    data = projenv.DumpIDEData(env)
+    # dump to file for the further reading by project.helpers.load_project_ide_data
+    with open(
+        projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
+        mode="w",
+        encoding="utf8",
+    ) as fp:
+        json.dump(data, fp)
+    click.echo("\n%s\n" % json.dumps(data))  # pylint: disable=undefined-variable
     env.Exit(0)

 if "sizedata" in COMMAND_LINE_TARGETS:

@@ -58,7 +58,7 @@ class __CompilationDbNode(SCons.Node.Python.Value):


 def changed_since_last_build_node(*args, **kwargs):
-    """ Dummy decider to force always building"""
+    """Dummy decider to force always building"""
     return True


@@ -152,7 +152,7 @@ def WriteCompilationDb(target, source, env):
         item["file"] = os.path.abspath(item["file"])
         entries.append(item)

-    with open(str(target[0]), "w") as target_file:
+    with open(str(target[0]), mode="w", encoding="utf8") as target_file:
         json.dump(
             entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
         )

@@ -14,13 +14,12 @@

 from __future__ import absolute_import

+import glob
 import os
-from glob import glob

 import SCons.Defaults  # pylint: disable=import-error
 import SCons.Subst  # pylint: disable=import-error

-from platformio.compat import glob_escape
 from platformio.package.manager.core import get_core_package_dir
 from platformio.proc import exec_command, where_is_program

@@ -49,7 +48,7 @@ def _dump_includes(env):
     for pkg in p.get_installed_packages():
         if p.get_package_type(pkg.metadata.name) != "toolchain":
             continue
-        toolchain_dir = glob_escape(pkg.path)
+        toolchain_dir = glob.escape(pkg.path)
         toolchain_incglobs = [
             os.path.join(toolchain_dir, "*", "include", "c++", "*"),
             os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),

@@ -57,7 +56,9 @@ def _dump_includes(env):
             os.path.join(toolchain_dir, "*", "include*"),
         ]
         for g in toolchain_incglobs:
-            includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
+            includes["toolchain"].extend(
+                [os.path.realpath(inc) for inc in glob.glob(g)]
+            )

     # include Unity framework if there are tests in project
     includes["unity"] = []

@@ -156,7 +157,7 @@ def _subst_cmd(env, cmd):


 def DumpIDEData(env, globalenv):
-    """ env here is `projenv`"""
+    """env here is `projenv`"""

     data = {
         "env_name": env["PIOENV"],

@@ -33,7 +33,7 @@ from SCons.Script import DefaultEnvironment  # pylint: disable=import-error
 from platformio import exception, fs, util
 from platformio.builder.tools import platformio as piotool
 from platformio.clients.http import InternetIsOffline
-from platformio.compat import WINDOWS, hashlib_encode_data, string_types
+from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
 from platformio.package.exception import UnknownPackageError
 from platformio.package.manager.library import LibraryPackageManager
 from platformio.package.manifest.parser import (

@@ -86,7 +86,9 @@ class LibBuilderFactory(object):
                     fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
                 ):
                     continue
-                with io.open(os.path.join(root, fname), errors="ignore") as fp:
+                with io.open(
+                    os.path.join(root, fname), encoding="utf8", errors="ignore"
+                ) as fp:
                     content = fp.read()
                     if not content:
                         continue

@@ -126,9 +128,9 @@ class LibBuilderBase(object):

         self._is_dependent = False
         self._is_built = False
-        self._depbuilders = list()
+        self._depbuilders = []
-        self._circular_deps = list()
+        self._circular_deps = []
-        self._processed_files = list()
+        self._processed_files = []

         # reset source filter, could be overridden with extra script
         self.env["SRC_FILTER"] = ""

@@ -142,7 +144,7 @@ class LibBuilderBase(object):
     def __contains__(self, path):
         p1 = self.path
         p2 = path
-        if WINDOWS:
+        if IS_WINDOWS:
             p1 = p1.lower()
             p2 = p2.lower()
         if p1 == p2:

@@ -459,12 +461,22 @@ class LibBuilderBase(object):
             for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
                 self.env.PrependUnique(**{key: lb.env.get(key)})

-        if self.lib_archive:
+        do_not_archive = not self.lib_archive
+        if not do_not_archive:
+            nodes = self.env.CollectBuildFiles(
+                self.build_dir, self.src_dir, self.src_filter
+            )
+            if nodes:
                 libs.append(
-                self.env.BuildLibrary(self.build_dir, self.src_dir, self.src_filter)
+                    self.env.BuildLibrary(
+                        self.build_dir, self.src_dir, self.src_filter, nodes
+                    )
                 )
             else:
+                do_not_archive = True
+        if do_not_archive:
             self.env.BuildSources(self.build_dir, self.src_dir, self.src_filter)

         return libs


@@ -545,6 +557,21 @@ class ArduinoLibBuilder(LibBuilderBase):
     def is_platforms_compatible(self, platforms):
         return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])

+    @property
+    def build_flags(self):
+        ldflags = [
+            LibBuilderBase.build_flags.fget(self),  # pylint: disable=no-member
+            self._manifest.get("ldflags"),
+        ]
+        if self._manifest.get("precompiled") in ("true", "full"):
+            # add to LDPATH {build.mcu} folder
+            board_config = self.env.BoardConfig()
+            self.env.PrependUnique(
+                LIBPATH=os.path.join(self.src_dir, board_config.get("build.cpu"))
+            )
+        ldflags = [flag for flag in ldflags if flag]  # remove empty
+        return " ".join(ldflags) if ldflags else None
+

 class MbedLibBuilder(LibBuilderBase):
     def load_manifest(self):

@@ -671,7 +698,7 @@ class MbedLibBuilder(LibBuilderBase):

     def _mbed_conf_append_macros(self, mbed_config_path, macros):
         lines = []
-        with open(mbed_config_path) as fp:
+        with open(mbed_config_path, encoding="utf8") as fp:
             for line in fp.readlines():
                 line = line.strip()
                 if line == "#endif":

@@ -690,7 +717,7 @@ class MbedLibBuilder(LibBuilderBase):
             if len(tokens) < 2 or tokens[1] not in macros:
                 lines.append(line)
         lines.append("")
-        with open(mbed_config_path, "w") as fp:
+        with open(mbed_config_path, mode="w", encoding="utf8") as fp:
             fp.write("\n".join(lines))

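The new build_flags property added to ArduinoLibBuilder above reads the "precompiled" and "ldflags" fields of an Arduino library.properties manifest, as noted in the 5.2.0 changelog. A minimal sketch of a manifest that would exercise that path — the library name, version, and linker flag are placeholders, not taken from this commit:

  name=SomePrecompiledLib
  version=1.0.0
  precompiled=true
  ldflags=-lsomeprecompiled

With precompiled set to "true" or "full", the builder prepends the library's src/<build.cpu> folder to LIBPATH, and a non-empty ldflags value is joined onto the flags returned by the base build_flags property.
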
@@ -21,20 +21,20 @@ import re
 from SCons.Platform import TempFileMunge  # pylint: disable=import-error
 from SCons.Subst import quote_spaces  # pylint: disable=import-error

-from platformio.compat import WINDOWS, hashlib_encode_data
+from platformio.compat import IS_WINDOWS, hashlib_encode_data

 # There are the next limits depending on a platform:
 # - Windows = 8192
 # - Unix = 131072
 # We need ~512 characters for compiler and temporary file paths
-MAX_LINE_LENGTH = (8192 if WINDOWS else 131072) - 512
+MAX_LINE_LENGTH = (8192 if IS_WINDOWS else 131072) - 512

 WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")


 def tempfile_arg_esc_func(arg):
     arg = quote_spaces(arg)
-    if not WINDOWS:
+    if not IS_WINDOWS:
         return arg
     # GCC requires double Windows slashes, let's use UNIX separator
     return WINPATHSEP_RE.sub(r"/\1", arg)

@@ -65,7 +65,7 @@ def _file_long_data(env, data):
     )
     if os.path.isfile(tmp_file):
         return tmp_file
-    with open(tmp_file, "w") as fp:
+    with open(tmp_file, mode="w", encoding="utf8") as fp:
         fp.write(data)
     return tmp_file

|
|||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import atexit
|
import atexit
|
||||||
|
import glob
|
||||||
import io
|
import io
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
from tempfile import mkstemp
|
import tempfile
|
||||||
|
|
||||||
import click
|
import click
|
||||||
|
|
||||||
from platformio import fs, util
|
from platformio import fs, util
|
||||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding, glob_escape
|
from platformio.compat import get_filesystem_encoding, get_locale_encoding
|
||||||
from platformio.package.manager.core import get_core_package_dir
|
from platformio.package.manager.core import get_core_package_dir
|
||||||
from platformio.proc import exec_command
|
from platformio.proc import exec_command
|
||||||
|
|
||||||
@@ -116,7 +117,7 @@ class InoToCPPConverter(object):
|
|||||||
return out_file
|
return out_file
|
||||||
|
|
||||||
def _gcc_preprocess(self, contents, out_file):
|
def _gcc_preprocess(self, contents, out_file):
|
||||||
tmp_path = mkstemp()[1]
|
tmp_path = tempfile.mkstemp()[1]
|
||||||
self.write_safe_contents(tmp_path, contents)
|
self.write_safe_contents(tmp_path, contents)
|
||||||
self.env.Execute(
|
self.env.Execute(
|
||||||
self.env.VerboseAction(
|
self.env.VerboseAction(
|
||||||
@@ -229,7 +230,7 @@ class InoToCPPConverter(object):
|
|||||||
|
|
||||||
|
|
||||||
def ConvertInoToCpp(env):
|
def ConvertInoToCpp(env):
|
||||||
src_dir = glob_escape(env.subst("$PROJECT_SRC_DIR"))
|
src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR"))
|
||||||
ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
|
ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
|
||||||
os.path.join(src_dir, "*.pde")
|
os.path.join(src_dir, "*.pde")
|
||||||
)
|
)
|
||||||
@@ -333,7 +334,13 @@ def ConfigureDebugFlags(env):
|
|||||||
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
|
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
|
||||||
_cleanup_debug_flags(scope)
|
_cleanup_debug_flags(scope)
|
||||||
|
|
||||||
debug_flags = env.ParseFlags(env.GetProjectOption("debug_build_flags"))
|
debug_flags = env.ParseFlags(
|
||||||
|
env.get("PIODEBUGFLAGS")
|
||||||
|
if env.get("PIODEBUGFLAGS")
|
||||||
|
and not env.GetProjectOptions(as_dict=True).get("debug_build_flags")
|
||||||
|
else env.GetProjectOption("debug_build_flags")
|
||||||
|
)
|
||||||
|
|
||||||
env.MergeFlags(debug_flags)
|
env.MergeFlags(debug_flags)
|
||||||
optimization_flags = [
|
optimization_flags = [
|
||||||
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
|
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
|
||||||
|
@@ -21,7 +21,7 @@ from SCons.Script import ARGUMENTS  # pylint: disable=import-error
 from SCons.Script import COMMAND_LINE_TARGETS  # pylint: disable=import-error

 from platformio import fs, util
-from platformio.compat import WINDOWS
+from platformio.compat import IS_MACOS, IS_WINDOWS
 from platformio.package.meta import PackageItem
 from platformio.package.version import get_original_version
 from platformio.platform.exception import UnknownBoard

@@ -71,7 +71,6 @@ def LoadPioPlatform(env):
     env["PIOPLATFORM"] = p.name

     # Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
-    systype = util.get_systype()
     for pkg in p.get_installed_packages():
         type_ = p.get_package_type(pkg.metadata.name)
         if type_ not in ("toolchain", "uploader", "debugger"):

@@ -83,12 +82,12 @@ def LoadPioPlatform(env):
             else pkg.path,
         )
         if (
-            not WINDOWS
+            not IS_WINDOWS
             and os.path.isdir(os.path.join(pkg.path, "lib"))
             and type_ != "toolchain"
         ):
             env.PrependENVPath(
-                "DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
+                "DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH",
                 os.path.join(pkg.path, "lib"),
             )

@@ -16,6 +16,7 @@

 from __future__ import absolute_import

+import json
 import sys
 from os import environ, makedirs, remove
 from os.path import isdir, join, splitdrive

@@ -23,9 +24,8 @@ from os.path import isdir, join, splitdrive
 from elftools.elf.descriptions import describe_sh_flags
 from elftools.elf.elffile import ELFFile

-from platformio.compat import dump_json_to_unicode
+from platformio.compat import IS_WINDOWS
 from platformio.proc import exec_command
-from platformio.util import get_systype


 def _run_tool(cmd, env, tool_args):

@@ -37,7 +37,7 @@ def _run_tool(cmd, env, tool_args):
         makedirs(build_dir)
     tmp_file = join(build_dir, "size-data-longcmd.txt")

-    with open(tmp_file, "w") as fp:
+    with open(tmp_file, mode="w", encoding="utf8") as fp:
         fp.write("\n".join(tool_args))

     cmd.append("@" + tmp_file)

@@ -164,7 +164,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
         location = symbol_locations.get(hex(symbol["addr"]))
         if not location or "?" in location:
             continue
-        if "windows" in get_systype():
+        if IS_WINDOWS:
             drive, tail = splitdrive(location)
             location = join(drive.upper(), tail)
         symbol["file"] = location

@@ -220,7 +220,7 @@ def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument
         "sections": sections,
     }

-    files = dict()
+    files = {}
     for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
         file_path = symbol.get("file") or "unknown"
         if not files.get(file_path, {}):

@@ -235,14 +235,16 @@ def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument

         files[file_path]["symbols"].append(symbol)

-    data["memory"]["files"] = list()
+    data["memory"]["files"] = []
     for k, v in files.items():
         file_data = {"path": k}
         file_data.update(v)
         data["memory"]["files"].append(file_data)

-    with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
-        fp.write(dump_json_to_unicode(data))
+    with open(
+        join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
+    ) as fp:
+        fp.write(json.dumps(data))


 def exists(_):

@@ -31,7 +31,7 @@ def VerboseAction(_, act, actstr):

 def PioClean(env, clean_dir):
     def _relpath(path):
-        if compat.WINDOWS:
+        if compat.IS_WINDOWS:
             prefix = os.getcwd()[:2].lower()
             if (
                 ":" not in prefix

@@ -26,7 +26,7 @@ from SCons.Script import ARGUMENTS  # pylint: disable=import-error
 from serial import Serial, SerialException

 from platformio import exception, fs, util
-from platformio.compat import WINDOWS
+from platformio.compat import IS_WINDOWS
 from platformio.proc import exec_command

 # pylint: disable=unused-argument

@@ -134,7 +134,7 @@ def AutodetectUploadPort(*args, **kwargs):
             continue
         port = item["port"]
         if upload_protocol.startswith("blackmagic"):
-            if WINDOWS and port.startswith("COM") and len(port) > 4:
+            if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
                 port = "\\\\.\\%s" % port
             if "GDB" in item["description"]:
                 return port

@@ -236,9 +236,9 @@ def CheckUploadSize(_, target, source, env):
     def _format_availale_bytes(value, total):
         percent_raw = float(value) / float(total)
         blocks_per_progress = 10
-        used_blocks = int(round(blocks_per_progress * percent_raw))
-        if used_blocks > blocks_per_progress:
-            used_blocks = blocks_per_progress
+        used_blocks = min(
+            int(round(blocks_per_progress * percent_raw)), blocks_per_progress
+        )
         return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
             "=" * used_blocks, blocks_per_progress, percent_raw, value, total
         )

@@ -27,7 +27,7 @@ from SCons.Script import Export  # pylint: disable=import-error
 from SCons.Script import SConscript  # pylint: disable=import-error

 from platformio import __version__, fs
-from platformio.compat import MACOS, string_types
+from platformio.compat import IS_MACOS, string_types
 from platformio.package.version import pepver_to_semver

 SRC_HEADER_EXT = ["h", "hpp"]

@@ -50,7 +50,7 @@ def GetBuildType(env):
     return (
         "debug"
         if (
-            set(["debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
+            set(["__debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
             or env.GetProjectOption("build_type") == "debug"
         )
         else "release"

@@ -69,13 +69,14 @@ def BuildProgram(env):
     if (
         env.get("LIBS")
         and env.GetCompilerType() == "gcc"
-        and (env.PioPlatform().is_embedded() or not MACOS)
+        and (env.PioPlatform().is_embedded() or not IS_MACOS)
     ):
         env.Prepend(_LIBFLAGS="-Wl,--start-group ")
         env.Append(_LIBFLAGS=" -Wl,--end-group")

     program = env.Program(
-        os.path.join("$BUILD_DIR", env.subst("$PROGNAME")), env["PIOBUILDFILES"]
+        os.path.join("$BUILD_DIR", env.subst("$PROGNAME$PROGSUFFIX")),
+        env["PIOBUILDFILES"],
     )
     env.Replace(PIOMAINPROG=program)

@@ -345,11 +346,10 @@ def BuildFrameworks(env, frameworks):
         env.Exit(1)


-def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
+def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
     env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
-    return env.StaticLibrary(
-        env.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter)
-    )
+    nodes = nodes or env.CollectBuildFiles(variant_dir, src_dir, src_filter)
+    return env.StaticLibrary(env.subst(variant_dir), nodes)


 def BuildSources(env, variant_dir, src_dir, src_filter=None):

@@ -78,9 +78,9 @@ class ContentCache(object):
         if not os.path.isdir(os.path.dirname(cache_path)):
             os.makedirs(os.path.dirname(cache_path))
         try:
-            with codecs.open(cache_path, "wb", encoding="utf8") as fp:
+            with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
                 fp.write(data)
-            with open(self._db_path, "a") as fp:
+            with open(self._db_path, mode="a", encoding="utf8") as fp:
                 fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
         except UnicodeError:
             if os.path.isfile(cache_path):

@@ -92,7 +92,7 @@ class ContentCache(object):
         return self._unlock_dbindex()

     def delete(self, keys=None):
-        """ Keys=None, delete expired items """
+        """Keys=None, delete expired items"""
         if not os.path.isfile(self._db_path):
             return None
         if not keys:

@@ -102,7 +102,7 @@ class ContentCache(object):
         paths_for_delete = [self.get_cache_path(k) for k in keys]
         found = False
         newlines = []
-        with open(self._db_path) as fp:
+        with open(self._db_path, encoding="utf8") as fp:
             for line in fp.readlines():
                 line = line.strip()
                 if "=" not in line:

@@ -129,7 +129,7 @@ class ContentCache(object):
                 pass

         if found and self._lock_dbindex():
-            with open(self._db_path, "w") as fp:
+            with open(self._db_path, mode="w", encoding="utf8") as fp:
                 fp.write("\n".join(newlines) + "\n")
             self._unlock_dbindex()

|
|||||||
app.set_state_item("account", account)
|
app.set_state_item("account", account)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
def get_logged_username(self):
|
||||||
|
return self.get_account_info(offline=True).get("profile").get("username")
|
||||||
|
|
||||||
def destroy_account(self):
|
def destroy_account(self):
|
||||||
return self.send_auth_request("delete", "/v1/account")
|
return self.send_auth_request("delete", "/v1/account")
|
||||||
|
|
||||||
|
@@ -80,7 +80,7 @@ class EndpointSessionIterator(object):
         return self

     def next(self):
-        """ For Python 2 compatibility """
+        """For Python 2 compatibility"""
         return self.__next__()

     def __next__(self):

@@ -101,7 +101,10 @@ class HTTPClient(object):
     def __del__(self):
         if not self._session:
             return
+        try:
             self._session.close()
+        except:  # pylint: disable=bare-except
+            pass
         self._session = None

     def _next_session(self):

|
|||||||
from platformio import __registry_api__, fs
|
from platformio import __registry_api__, fs
|
||||||
from platformio.clients.account import AccountClient
|
from platformio.clients.account import AccountClient
|
||||||
from platformio.clients.http import HTTPClient, HTTPClientError
|
from platformio.clients.http import HTTPClient, HTTPClientError
|
||||||
from platformio.package.meta import PackageType
|
|
||||||
|
|
||||||
# pylint: disable=too-many-arguments
|
# pylint: disable=too-many-arguments
|
||||||
|
|
||||||
@@ -32,18 +31,13 @@ class RegistryClient(HTTPClient):
|
|||||||
kwargs["headers"] = headers
|
kwargs["headers"] = headers
|
||||||
return self.fetch_json_data(*args, **kwargs)
|
return self.fetch_json_data(*args, **kwargs)
|
||||||
|
|
||||||
def publish_package(
|
def publish_package( # pylint: disable=redefined-builtin
|
||||||
self, archive_path, owner=None, released_at=None, private=False, notify=True
|
self, owner, type, archive_path, released_at=None, private=False, notify=True
|
||||||
):
|
):
|
||||||
account = AccountClient()
|
|
||||||
if not owner:
|
|
||||||
owner = (
|
|
||||||
account.get_account_info(offline=True).get("profile").get("username")
|
|
||||||
)
|
|
||||||
with open(archive_path, "rb") as fp:
|
with open(archive_path, "rb") as fp:
|
||||||
return self.send_auth_request(
|
return self.send_auth_request(
|
||||||
"post",
|
"post",
|
||||||
"/v3/packages/%s/%s" % (owner, PackageType.from_archive(archive_path)),
|
"/v3/packages/%s/%s" % (owner, type),
|
||||||
params={
|
params={
|
||||||
"private": 1 if private else 0,
|
"private": 1 if private else 0,
|
||||||
"notify": 1 if notify else 0,
|
"notify": 1 if notify else 0,
|
||||||
@@ -59,13 +53,8 @@ class RegistryClient(HTTPClient):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def unpublish_package( # pylint: disable=redefined-builtin
|
def unpublish_package( # pylint: disable=redefined-builtin
|
||||||
self, type, name, owner=None, version=None, undo=False
|
self, owner, type, name, version=None, undo=False
|
||||||
):
|
):
|
||||||
account = AccountClient()
|
|
||||||
if not owner:
|
|
||||||
owner = (
|
|
||||||
account.get_account_info(offline=True).get("profile").get("username")
|
|
||||||
)
|
|
||||||
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
|
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
|
||||||
if version:
|
if version:
|
||||||
path += "/" + version
|
path += "/" + version
|
||||||
|
@@ -184,7 +184,7 @@ def account_destroy():
|
|||||||
click.confirm(
|
click.confirm(
|
||||||
"Are you sure you want to delete the %s user account?\n"
|
"Are you sure you want to delete the %s user account?\n"
|
||||||
"Warning! All linked data will be permanently removed and can not be restored."
|
"Warning! All linked data will be permanently removed and can not be restored."
|
||||||
% client.get_account_info().get("profile").get("username"),
|
% client.get_logged_username(),
|
||||||
abort=True,
|
abort=True,
|
||||||
)
|
)
|
||||||
client.destroy_account()
|
client.destroy_account()
|
||||||
|
@@ -13,12 +13,12 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import json
|
import json
|
||||||
|
import shutil
|
||||||
|
|
||||||
import click
|
import click
|
||||||
from tabulate import tabulate
|
from tabulate import tabulate
|
||||||
|
|
||||||
from platformio import fs
|
from platformio import fs
|
||||||
from platformio.compat import dump_json_to_unicode
|
|
||||||
from platformio.package.manager.platform import PlatformPackageManager
|
from platformio.package.manager.platform import PlatformPackageManager
|
||||||
|
|
||||||
|
|
||||||
@@ -41,7 +41,7 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
|||||||
grpboards[board["platform"]] = []
|
grpboards[board["platform"]] = []
|
||||||
grpboards[board["platform"]].append(board)
|
grpboards[board["platform"]].append(board)
|
||||||
|
|
||||||
terminal_width, _ = click.get_terminal_size()
|
terminal_width, _ = shutil.get_terminal_size()
|
||||||
for (platform, boards) in sorted(grpboards.items()):
|
for (platform, boards) in sorted(grpboards.items()):
|
||||||
click.echo("")
|
click.echo("")
|
||||||
click.echo("Platform: ", nl=False)
|
click.echo("Platform: ", nl=False)
|
||||||
@@ -83,4 +83,4 @@ def _print_boards_json(query, installed=False):
|
|||||||
if query.lower() not in search_data.lower():
|
if query.lower() not in search_data.lower():
|
||||||
continue
|
continue
|
||||||
result.append(board)
|
result.append(board)
|
||||||
click.echo(dump_json_to_unicode(result))
|
click.echo(json.dumps(result))
|
||||||
|
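Note on the hunks above: the boards command drops two project-specific compatibility helpers in favour of the standard library. A minimal standalone sketch of the replacement calls follows (illustrative data only, not part of the diff):

import json
import shutil

# shutil.get_terminal_size() (Python 3.3+) replaces the removed click.get_terminal_size()
terminal_width, _ = shutil.get_terminal_size()

# json.dumps() replaces the dump_json_to_unicode() compatibility helper that was
# only needed while Python 2 was still supported
print(json.dumps([{"id": "uno", "platform": "atmelavr"}])[:terminal_width])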
@@ -15,7 +15,9 @@
 # pylint: disable=too-many-arguments,too-many-locals,too-many-branches
 # pylint: disable=redefined-builtin,too-many-statements

+import json
 import os
+import shutil
 from collections import Counter
 from os.path import dirname, isfile
 from time import time
@@ -26,7 +28,6 @@ from tabulate import tabulate
 from platformio import app, exception, fs, util
 from platformio.commands.check.defect import DefectItem
 from platformio.commands.check.tools import CheckToolFactory
-from platformio.compat import dump_json_to_unicode
 from platformio.project.config import ProjectConfig
 from platformio.project.helpers import find_project_dir_above, get_project_dir

@@ -163,7 +164,7 @@ def cli(
         print_processing_footer(result)

     if json_output:
-        click.echo(dump_json_to_unicode(results_to_json(results)))
+        click.echo(json.dumps(results_to_json(results)))
     elif not silent:
         print_check_summary(results)

@@ -193,7 +194,7 @@ def print_processing_header(tool, envname, envdump):
         "Checking %s > %s (%s)"
         % (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
     )
-    terminal_width, _ = click.get_terminal_size()
+    terminal_width, _ = shutil.get_terminal_size()
     click.secho("-" * terminal_width, bold=True)


@@ -214,7 +215,7 @@ def print_processing_footer(result):


 def collect_component_stats(result):
-    components = dict()
+    components = {}

     def _append_defect(component, defect):
         if not components.get(component):
@@ -249,7 +250,7 @@ def print_defects_stats(results):

     severity_labels = list(DefectItem.SEVERITY_LABELS.values())
     severity_labels.reverse()
-    tabular_data = list()
+    tabular_data = []
     for k, v in component_stats.items():
         tool_defect = [v.get(s, 0) for s in severity_labels]
         tabular_data.append([k] + tool_defect)
@@ -12,12 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import glob
 import os
-from tempfile import NamedTemporaryFile
+import tempfile

 import click

-from platformio import compat, fs, proc
+from platformio import fs, proc
 from platformio.commands.check.defect import DefectItem
 from platformio.project.helpers import load_project_ide_data

@@ -104,7 +105,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
         return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}

     def _create_tmp_file(self, data):
-        with NamedTemporaryFile("w", delete=False) as fp:
+        with tempfile.NamedTemporaryFile("w", delete=False) as fp:
             fp.write(data)
         self._tmp_files.append(fp.name)
         return fp.name
@@ -207,7 +208,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
             result["c++"].append(os.path.realpath(path))

         for pattern in patterns:
-            for item in compat.glob_recursive(pattern):
+            for item in glob.glob(pattern, recursive=True):
                 if not os.path.isdir(item):
                     _add_file(item)
                 for root, _, files in os.walk(item, followlinks=True):
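Note on the hunk above: `compat.glob_recursive()` existed only to paper over Python 2's glob, which lacked recursive matching. A short standalone sketch of the stdlib replacement, using a hypothetical pattern for illustration:

import glob
import os

# glob supports "**" natively since Python 3.5, so the project-specific
# compat.glob_recursive() wrapper can be replaced by the standard call.
pattern = os.path.join("src", "**", "*.cpp")  # hypothetical pattern, illustration only
for item in glob.glob(pattern, recursive=True):
    if not os.path.isdir(item):
        print(item)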
@@ -64,7 +64,7 @@ class CppcheckCheckTool(CheckToolBase):
         if any(f not in self._buffer for f in self.defect_fields):
             return None

-        args = dict()
+        args = {}
         for field in self._buffer.split(self._field_delimiter):
             field = field.strip().replace('"', "")
             name, value = field.split("=", 1)
@@ -19,9 +19,10 @@ from xml.etree.ElementTree import fromstring

 import click

-from platformio import proc, util
+from platformio import proc
 from platformio.commands.check.defect import DefectItem
 from platformio.commands.check.tools.base import CheckToolBase
+from platformio.compat import IS_WINDOWS
 from platformio.package.manager.core import get_core_package_dir


@@ -34,18 +35,18 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
         self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
         self.tool_path = os.path.join(
             get_core_package_dir("tool-pvs-studio"),
-            "x64" if "windows" in util.get_systype() else "bin",
+            "x64" if IS_WINDOWS else "bin",
             "pvs-studio",
         )
         super(PvsStudioCheckTool, self).__init__(*args, **kwargs)

-        with open(self._tmp_cfg_file, "w") as fp:
+        with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
             fp.write(
                 "exclude-path = "
                 + self.config.get_optional_dir("packages").replace("\\", "/")
             )

-        with open(self._tmp_cmd_file, "w") as fp:
+        with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
             fp.write(
                 " ".join(
                     ['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
@@ -70,9 +71,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
     def _demangle_report(self, output_file):
         converter_tool = os.path.join(
             get_core_package_dir("tool-pvs-studio"),
-            "HtmlGenerator"
-            if "windows" in util.get_systype()
-            else os.path.join("bin", "plog-converter"),
+            "HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"),
         )

         cmd = (
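Note on the hunks above: the string search in `util.get_systype()` gives way to the `IS_WINDOWS` constant, and the temporary config/command files are now written with an explicit encoding. A minimal sketch of both ideas, assuming only the standard library (file name is hypothetical):

import sys

# platformio.compat.IS_WINDOWS boils down to a platform test along these lines;
# a module-level constant is cheaper and clearer than re-parsing the systype
# string at every call site.
IS_WINDOWS = sys.platform.lower().startswith("win")

# An explicit encoding makes the write independent of the locale default that a
# plain open(path, "w") would otherwise pick up.
with open("pvs-studio.cfg", mode="w", encoding="utf8") as fp:  # hypothetical file name
    fp.write("exclude-path = /tmp/packages\n")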
@@ -12,14 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from os import getenv, makedirs, remove
-from os.path import basename, isdir, isfile, join, realpath
-from shutil import copyfile, copytree
-from tempfile import mkdtemp
+import glob
+import os
+import shutil
+import tempfile

 import click

-from platformio import app, compat, fs
+from platformio import app, fs
 from platformio.commands.project import project_init as cmd_project_init
 from platformio.commands.project import validate_boards
 from platformio.commands.run.command import cli as cmd_run
@@ -33,8 +33,8 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
     for i, p in enumerate(value):
         if p.startswith("~"):
             value[i] = fs.expanduser(p)
-        value[i] = realpath(value[i])
-        if not compat.glob_recursive(value[i]):
+        value[i] = os.path.realpath(value[i])
+        if not glob.glob(value[i], recursive=True):
             invalid_path = p
             break
     try:
@@ -51,7 +51,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
 @click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
 @click.option(
     "--build-dir",
-    default=mkdtemp,
+    default=tempfile.mkdtemp,
     type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
 )
 @click.option("--keep-build-dir", is_flag=True)
@@ -78,28 +78,28 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
     verbose,
 ):

-    if not src and getenv("PLATFORMIO_CI_SRC"):
-        src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
+    if not src and os.getenv("PLATFORMIO_CI_SRC"):
+        src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
     if not src:
         raise click.BadParameter("Missing argument 'src'")

     try:
         app.set_session_var("force_option", True)

-        if not keep_build_dir and isdir(build_dir):
+        if not keep_build_dir and os.path.isdir(build_dir):
             fs.rmtree(build_dir)
-        if not isdir(build_dir):
-            makedirs(build_dir)
+        if not os.path.isdir(build_dir):
+            os.makedirs(build_dir)

         for dir_name, patterns in dict(lib=lib, src=src).items():
             if not patterns:
                 continue
             contents = []
             for p in patterns:
-                contents += compat.glob_recursive(p)
-            _copy_contents(join(build_dir, dir_name), contents)
+                contents += glob.glob(p, recursive=True)
+            _copy_contents(os.path.join(build_dir, dir_name), contents)

-        if project_conf and isfile(project_conf):
+        if project_conf and os.path.isfile(project_conf):
             _copy_project_conf(build_dir, project_conf)
         elif not board:
             raise CIBuildEnvsEmpty()
@@ -126,48 +126,50 @@ def _copy_contents(dst_dir, contents):
     items = {"dirs": set(), "files": set()}

     for path in contents:
-        if isdir(path):
+        if os.path.isdir(path):
             items["dirs"].add(path)
-        elif isfile(path):
+        elif os.path.isfile(path):
             items["files"].add(path)

-    dst_dir_name = basename(dst_dir)
+    dst_dir_name = os.path.basename(dst_dir)

     if dst_dir_name == "src" and len(items["dirs"]) == 1:
-        copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
+        shutil.copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
     else:
-        if not isdir(dst_dir):
-            makedirs(dst_dir)
+        if not os.path.isdir(dst_dir):
+            os.makedirs(dst_dir)
         for d in items["dirs"]:
-            copytree(d, join(dst_dir, basename(d)), symlinks=True)
+            shutil.copytree(
+                d, os.path.join(dst_dir, os.path.basename(d)), symlinks=True
+            )

     if not items["files"]:
         return

     if dst_dir_name == "lib":
-        dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
+        dst_dir = os.path.join(dst_dir, tempfile.mkdtemp(dir=dst_dir))

     for f in items["files"]:
-        dst_file = join(dst_dir, basename(f))
+        dst_file = os.path.join(dst_dir, os.path.basename(f))
         if f == dst_file:
             continue
-        copyfile(f, dst_file)
+        shutil.copyfile(f, dst_file)


 def _exclude_contents(dst_dir, patterns):
     contents = []
     for p in patterns:
-        contents += compat.glob_recursive(join(compat.glob_escape(dst_dir), p))
+        contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
     for path in contents:
-        path = realpath(path)
-        if isdir(path):
+        path = os.path.realpath(path)
+        if os.path.isdir(path):
             fs.rmtree(path)
-        elif isfile(path):
-            remove(path)
+        elif os.path.isfile(path):
+            os.remove(path)


 def _copy_project_conf(build_dir, project_conf):
     config = ProjectConfig(project_conf, parse_extra=False)
     if config.has_section("platformio"):
         config.remove_section("platformio")
-    config.save(join(build_dir, "platformio.ini"))
+    config.save(os.path.join(build_dir, "platformio.ini"))
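Note on the ci.py hunks above: the rewrite swaps long "from os.path import ..." lists for module-qualified calls. A tiny self-contained sketch of that style, for illustration only:

import os
import shutil
import tempfile

# Module-qualified calls read the same at every call site and avoid the long
# "from os.path import basename, isdir, isfile, join, realpath" lists the old
# version of ci.py carried around.
build_dir = tempfile.mkdtemp()               # was: mkdtemp()
lib_dir = os.path.join(build_dir, "lib")     # was: join(build_dir, "lib")
os.makedirs(lib_dir)                         # was: makedirs(lib_dir)
shutil.rmtree(build_dir)                     # cleanup for this sketch only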
167
platformio/commands/debug.py
Normal file
167
platformio/commands/debug.py
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
# pylint: disable=too-many-arguments, too-many-locals
|
||||||
|
# pylint: disable=too-many-branches, too-many-statements
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from platformio import app, exception, fs, proc
|
||||||
|
from platformio.commands.platform import init_platform
|
||||||
|
from platformio.compat import IS_WINDOWS
|
||||||
|
from platformio.debug import helpers
|
||||||
|
from platformio.debug.config.factory import DebugConfigFactory
|
||||||
|
from platformio.debug.exception import DebugInvalidOptionsError
|
||||||
|
from platformio.debug.process.gdb import GDBClientProcess
|
||||||
|
from platformio.project.config import ProjectConfig
|
||||||
|
from platformio.project.exception import ProjectEnvsNotAvailableError
|
||||||
|
from platformio.project.helpers import is_platformio_project
|
||||||
|
|
||||||
|
|
||||||
|
@click.command(
|
||||||
|
"debug",
|
||||||
|
context_settings=dict(ignore_unknown_options=True),
|
||||||
|
short_help="Unified debugger",
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"-d",
|
||||||
|
"--project-dir",
|
||||||
|
default=os.getcwd,
|
||||||
|
type=click.Path(
|
||||||
|
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||||
|
),
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"-c",
|
||||||
|
"--project-conf",
|
||||||
|
type=click.Path(
|
||||||
|
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||||
|
),
|
||||||
|
)
|
||||||
|
@click.option("--environment", "-e", metavar="<environment>")
|
||||||
|
@click.option("--verbose", "-v", is_flag=True)
|
||||||
|
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||||
|
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
|
||||||
|
app.set_session_var("custom_project_conf", project_conf)
|
||||||
|
|
||||||
|
# use env variables from Eclipse or CLion
|
||||||
|
for name in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
||||||
|
if is_platformio_project(project_dir):
|
||||||
|
break
|
||||||
|
if os.getenv(name):
|
||||||
|
project_dir = os.getenv(name)
|
||||||
|
|
||||||
|
with fs.cd(project_dir):
|
||||||
|
project_config = ProjectConfig.get_instance(project_conf)
|
||||||
|
project_config.validate(envs=[environment] if environment else None)
|
||||||
|
env_name = environment or helpers.get_default_debug_env(project_config)
|
||||||
|
|
||||||
|
if not interface:
|
||||||
|
return helpers.predebug_project(
|
||||||
|
ctx, project_dir, project_config, env_name, False, verbose
|
||||||
|
)
|
||||||
|
|
||||||
|
env_options = project_config.items(env=env_name, as_dict=True)
|
||||||
|
if "platform" not in env_options:
|
||||||
|
raise ProjectEnvsNotAvailableError()
|
||||||
|
|
||||||
|
with fs.cd(project_dir):
|
||||||
|
debug_config = DebugConfigFactory.new(
|
||||||
|
init_platform(env_options["platform"]), project_config, env_name
|
||||||
|
)
|
||||||
|
|
||||||
|
if "--version" in __unprocessed:
|
||||||
|
return subprocess.run(
|
||||||
|
[debug_config.client_executable_path, "--version"], check=True
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
fs.ensure_udev_rules()
|
||||||
|
except exception.InvalidUdevRules as e:
|
||||||
|
click.echo(
|
||||||
|
helpers.escape_gdbmi_stream("~", str(e) + "\n")
|
||||||
|
if helpers.is_gdbmi_mode()
|
||||||
|
else str(e) + "\n",
|
||||||
|
nl=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
rebuild_prog = False
|
||||||
|
preload = debug_config.load_cmds == ["preload"]
|
||||||
|
load_mode = debug_config.load_mode
|
||||||
|
if load_mode == "always":
|
||||||
|
rebuild_prog = preload or not helpers.has_debug_symbols(
|
||||||
|
debug_config.program_path
|
||||||
|
)
|
||||||
|
elif load_mode == "modified":
|
||||||
|
rebuild_prog = helpers.is_prog_obsolete(
|
||||||
|
debug_config.program_path
|
||||||
|
) or not helpers.has_debug_symbols(debug_config.program_path)
|
||||||
|
|
||||||
|
if not (debug_config.program_path and os.path.isfile(debug_config.program_path)):
|
||||||
|
rebuild_prog = True
|
||||||
|
|
||||||
|
if preload or (not rebuild_prog and load_mode != "always"):
|
||||||
|
# don't load firmware through debug server
|
||||||
|
debug_config.load_cmds = []
|
||||||
|
|
||||||
|
if rebuild_prog:
|
||||||
|
if helpers.is_gdbmi_mode():
|
||||||
|
click.echo(
|
||||||
|
helpers.escape_gdbmi_stream(
|
||||||
|
"~", "Preparing firmware for debugging...\n"
|
||||||
|
),
|
||||||
|
nl=False,
|
||||||
|
)
|
||||||
|
stream = helpers.GDBMIConsoleStream()
|
||||||
|
with proc.capture_std_streams(stream):
|
||||||
|
helpers.predebug_project(
|
||||||
|
ctx, project_dir, project_config, env_name, preload, verbose
|
||||||
|
)
|
||||||
|
stream.close()
|
||||||
|
else:
|
||||||
|
click.echo("Preparing firmware for debugging...")
|
||||||
|
helpers.predebug_project(
|
||||||
|
ctx, project_dir, project_config, env_name, preload, verbose
|
||||||
|
)
|
||||||
|
|
||||||
|
# save SHA sum of newly created prog
|
||||||
|
if load_mode == "modified":
|
||||||
|
helpers.is_prog_obsolete(debug_config.program_path)
|
||||||
|
|
||||||
|
if not os.path.isfile(debug_config.program_path):
|
||||||
|
raise DebugInvalidOptionsError("Program/firmware is missed")
|
||||||
|
|
||||||
|
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
|
||||||
|
asyncio.set_event_loop(loop)
|
||||||
|
|
||||||
|
with fs.cd(project_dir):
|
||||||
|
client = GDBClientProcess(project_dir, debug_config)
|
||||||
|
coro = client.run(__unprocessed)
|
||||||
|
try:
|
||||||
|
loop.run_until_complete(coro)
|
||||||
|
if IS_WINDOWS:
|
||||||
|
# an issue with `asyncio` executor and STIDIN,
|
||||||
|
# it cannot be closed gracefully
|
||||||
|
proc.force_exit()
|
||||||
|
finally:
|
||||||
|
del client
|
||||||
|
loop.close()
|
||||||
|
|
||||||
|
return True
|
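Note on the new debug command above and the deleted modules below: the GDB client moves from a Twisted reactor to an asyncio event loop. A minimal standalone sketch of the loop handling pattern shown above, with a placeholder coroutine standing in for GDBClientProcess:

import asyncio
import sys

IS_WINDOWS = sys.platform.lower().startswith("win")  # stand-in for platformio.compat.IS_WINDOWS

async def run_debug_client():
    # placeholder for GDBClientProcess(project_dir, debug_config).run(args)
    await asyncio.sleep(0)

# A ProactorEventLoop is required on Windows for subprocess/pipe support; elsewhere
# the default loop is used. The single coroutine is then driven to completion,
# which replaces the Twisted reactor.run() of the deleted implementation below.
loop = asyncio.ProactorEventLoop() if IS_WINDOWS else asyncio.get_event_loop()
asyncio.set_event_loop(loop)
try:
    loop.run_until_complete(run_debug_client())
finally:
    loop.close()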
@@ -1,175 +0,0 @@
|
|||||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
# pylint: disable=too-many-arguments, too-many-statements
|
|
||||||
# pylint: disable=too-many-locals, too-many-branches
|
|
||||||
|
|
||||||
import os
|
|
||||||
import signal
|
|
||||||
from os.path import isfile
|
|
||||||
|
|
||||||
import click
|
|
||||||
|
|
||||||
from platformio import app, exception, fs, proc
|
|
||||||
from platformio.commands.debug import helpers
|
|
||||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
|
||||||
from platformio.commands.platform import platform_install as cmd_platform_install
|
|
||||||
from platformio.package.manager.core import inject_contrib_pysite
|
|
||||||
from platformio.platform.exception import UnknownPlatform
|
|
||||||
from platformio.platform.factory import PlatformFactory
|
|
||||||
from platformio.project.config import ProjectConfig
|
|
||||||
from platformio.project.exception import ProjectEnvsNotAvailableError
|
|
||||||
from platformio.project.helpers import is_platformio_project, load_project_ide_data
|
|
||||||
|
|
||||||
|
|
||||||
@click.command(
|
|
||||||
"debug",
|
|
||||||
context_settings=dict(ignore_unknown_options=True),
|
|
||||||
short_help="Unified debugger",
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"-d",
|
|
||||||
"--project-dir",
|
|
||||||
default=os.getcwd,
|
|
||||||
type=click.Path(
|
|
||||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
|
||||||
),
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"-c",
|
|
||||||
"--project-conf",
|
|
||||||
type=click.Path(
|
|
||||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
|
||||||
),
|
|
||||||
)
|
|
||||||
@click.option("--environment", "-e", metavar="<environment>")
|
|
||||||
@click.option("--verbose", "-v", is_flag=True)
|
|
||||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
|
||||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
|
||||||
@click.pass_context
|
|
||||||
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
|
|
||||||
app.set_session_var("custom_project_conf", project_conf)
|
|
||||||
|
|
||||||
# use env variables from Eclipse or CLion
|
|
||||||
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
|
||||||
if is_platformio_project(project_dir):
|
|
||||||
break
|
|
||||||
if os.getenv(sysenv):
|
|
||||||
project_dir = os.getenv(sysenv)
|
|
||||||
|
|
||||||
with fs.cd(project_dir):
|
|
||||||
config = ProjectConfig.get_instance(project_conf)
|
|
||||||
config.validate(envs=[environment] if environment else None)
|
|
||||||
|
|
||||||
env_name = environment or helpers.get_default_debug_env(config)
|
|
||||||
env_options = config.items(env=env_name, as_dict=True)
|
|
||||||
if not set(env_options.keys()) >= set(["platform", "board"]):
|
|
||||||
raise ProjectEnvsNotAvailableError()
|
|
||||||
|
|
||||||
try:
|
|
||||||
platform = PlatformFactory.new(env_options["platform"])
|
|
||||||
except UnknownPlatform:
|
|
||||||
ctx.invoke(
|
|
||||||
cmd_platform_install,
|
|
||||||
platforms=[env_options["platform"]],
|
|
||||||
skip_default_package=True,
|
|
||||||
)
|
|
||||||
platform = PlatformFactory.new(env_options["platform"])
|
|
||||||
|
|
||||||
debug_options = helpers.configure_initial_debug_options(platform, env_options)
|
|
||||||
assert debug_options
|
|
||||||
|
|
||||||
if not interface:
|
|
||||||
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
|
|
||||||
|
|
||||||
ide_data = load_project_ide_data(project_dir, env_name)
|
|
||||||
if not ide_data:
|
|
||||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
|
||||||
|
|
||||||
if "--version" in __unprocessed:
|
|
||||||
result = proc.exec_command([ide_data["gdb_path"], "--version"])
|
|
||||||
if result["returncode"] == 0:
|
|
||||||
return click.echo(result["out"])
|
|
||||||
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
|
|
||||||
|
|
||||||
try:
|
|
||||||
fs.ensure_udev_rules()
|
|
||||||
except exception.InvalidUdevRules as e:
|
|
||||||
click.echo(
|
|
||||||
helpers.escape_gdbmi_stream("~", str(e) + "\n")
|
|
||||||
if helpers.is_gdbmi_mode()
|
|
||||||
else str(e) + "\n",
|
|
||||||
nl=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
debug_options = platform.configure_debug_options(debug_options, ide_data)
|
|
||||||
except NotImplementedError:
|
|
||||||
# legacy for ESP32 dev-platform <=2.0.0
|
|
||||||
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
|
|
||||||
debug_options, ide_data
|
|
||||||
)
|
|
||||||
|
|
||||||
rebuild_prog = False
|
|
||||||
preload = debug_options["load_cmds"] == ["preload"]
|
|
||||||
load_mode = debug_options["load_mode"]
|
|
||||||
if load_mode == "always":
|
|
||||||
rebuild_prog = preload or not helpers.has_debug_symbols(ide_data["prog_path"])
|
|
||||||
elif load_mode == "modified":
|
|
||||||
rebuild_prog = helpers.is_prog_obsolete(
|
|
||||||
ide_data["prog_path"]
|
|
||||||
) or not helpers.has_debug_symbols(ide_data["prog_path"])
|
|
||||||
else:
|
|
||||||
rebuild_prog = not isfile(ide_data["prog_path"])
|
|
||||||
|
|
||||||
if preload or (not rebuild_prog and load_mode != "always"):
|
|
||||||
# don't load firmware through debug server
|
|
||||||
debug_options["load_cmds"] = []
|
|
||||||
|
|
||||||
if rebuild_prog:
|
|
||||||
if helpers.is_gdbmi_mode():
|
|
||||||
click.echo(
|
|
||||||
helpers.escape_gdbmi_stream(
|
|
||||||
"~", "Preparing firmware for debugging...\n"
|
|
||||||
),
|
|
||||||
nl=False,
|
|
||||||
)
|
|
||||||
stream = helpers.GDBMIConsoleStream()
|
|
||||||
with proc.capture_std_streams(stream):
|
|
||||||
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
|
|
||||||
stream.close()
|
|
||||||
else:
|
|
||||||
click.echo("Preparing firmware for debugging...")
|
|
||||||
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
|
|
||||||
|
|
||||||
# save SHA sum of newly created prog
|
|
||||||
if load_mode == "modified":
|
|
||||||
helpers.is_prog_obsolete(ide_data["prog_path"])
|
|
||||||
|
|
||||||
if not isfile(ide_data["prog_path"]):
|
|
||||||
raise DebugInvalidOptionsError("Program/firmware is missed")
|
|
||||||
|
|
||||||
# run debugging client
|
|
||||||
inject_contrib_pysite()
|
|
||||||
|
|
||||||
# pylint: disable=import-outside-toplevel
|
|
||||||
from platformio.commands.debug.process.client import GDBClient, reactor
|
|
||||||
|
|
||||||
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
|
|
||||||
client.spawn(ide_data["gdb_path"], ide_data["prog_path"])
|
|
||||||
|
|
||||||
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
|
|
||||||
reactor.run()
|
|
||||||
|
|
||||||
return True
|
|
@@ -1,303 +0,0 @@
|
|||||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
from fnmatch import fnmatch
|
|
||||||
from hashlib import sha1
|
|
||||||
from io import BytesIO
|
|
||||||
from os.path import isfile
|
|
||||||
|
|
||||||
from platformio import fs, util
|
|
||||||
from platformio.commands import PlatformioCLI
|
|
||||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
|
||||||
from platformio.commands.run.command import cli as cmd_run
|
|
||||||
from platformio.compat import is_bytes
|
|
||||||
from platformio.project.config import ProjectConfig
|
|
||||||
from platformio.project.options import ProjectOptions
|
|
||||||
|
|
||||||
|
|
||||||
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
|
|
||||||
|
|
||||||
STDOUT = sys.stdout
|
|
||||||
|
|
||||||
def write(self, text):
|
|
||||||
self.STDOUT.write(escape_gdbmi_stream("~", text))
|
|
||||||
self.STDOUT.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def is_gdbmi_mode():
|
|
||||||
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
|
|
||||||
|
|
||||||
|
|
||||||
def escape_gdbmi_stream(prefix, stream):
|
|
||||||
bytes_stream = False
|
|
||||||
if is_bytes(stream):
|
|
||||||
bytes_stream = True
|
|
||||||
stream = stream.decode()
|
|
||||||
|
|
||||||
if not stream:
|
|
||||||
return b"" if bytes_stream else ""
|
|
||||||
|
|
||||||
ends_nl = stream.endswith("\n")
|
|
||||||
stream = re.sub(r"\\+", "\\\\\\\\", stream)
|
|
||||||
stream = stream.replace('"', '\\"')
|
|
||||||
stream = stream.replace("\n", "\\n")
|
|
||||||
stream = '%s"%s"' % (prefix, stream)
|
|
||||||
if ends_nl:
|
|
||||||
stream += "\n"
|
|
||||||
|
|
||||||
return stream.encode() if bytes_stream else stream
|
|
||||||
|
|
||||||
|
|
||||||
def get_default_debug_env(config):
|
|
||||||
default_envs = config.default_envs()
|
|
||||||
all_envs = config.envs()
|
|
||||||
for env in default_envs:
|
|
||||||
if config.get("env:" + env, "build_type") == "debug":
|
|
||||||
return env
|
|
||||||
for env in all_envs:
|
|
||||||
if config.get("env:" + env, "build_type") == "debug":
|
|
||||||
return env
|
|
||||||
return default_envs[0] if default_envs else all_envs[0]
|
|
||||||
|
|
||||||
|
|
||||||
def predebug_project(ctx, project_dir, env_name, preload, verbose):
|
|
||||||
ctx.invoke(
|
|
||||||
cmd_run,
|
|
||||||
project_dir=project_dir,
|
|
||||||
environment=[env_name],
|
|
||||||
target=["debug"] + (["upload"] if preload else []),
|
|
||||||
verbose=verbose,
|
|
||||||
)
|
|
||||||
if preload:
|
|
||||||
time.sleep(5)
|
|
||||||
|
|
||||||
|
|
||||||
def configure_initial_debug_options(platform, env_options):
|
|
||||||
def _cleanup_cmds(items):
|
|
||||||
items = ProjectConfig.parse_multi_values(items)
|
|
||||||
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
|
|
||||||
|
|
||||||
board_config = platform.board_config(env_options["board"])
|
|
||||||
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
|
|
||||||
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
|
|
||||||
server_options = None
|
|
||||||
|
|
||||||
# specific server per a system
|
|
||||||
if isinstance(tool_settings.get("server", {}), list):
|
|
||||||
for item in tool_settings["server"][:]:
|
|
||||||
tool_settings["server"] = item
|
|
||||||
if util.get_systype() in item.get("system", []):
|
|
||||||
break
|
|
||||||
|
|
||||||
# user overwrites debug server
|
|
||||||
if env_options.get("debug_server"):
|
|
||||||
server_options = {
|
|
||||||
"cwd": None,
|
|
||||||
"executable": None,
|
|
||||||
"arguments": env_options.get("debug_server"),
|
|
||||||
}
|
|
||||||
server_options["executable"] = server_options["arguments"][0]
|
|
||||||
server_options["arguments"] = server_options["arguments"][1:]
|
|
||||||
elif "server" in tool_settings:
|
|
||||||
server_options = tool_settings["server"]
|
|
||||||
server_package = server_options.get("package")
|
|
||||||
server_package_dir = (
|
|
||||||
platform.get_package_dir(server_package) if server_package else None
|
|
||||||
)
|
|
||||||
if server_package and not server_package_dir:
|
|
||||||
platform.install_packages(
|
|
||||||
with_packages=[server_package], skip_default_package=True, silent=True
|
|
||||||
)
|
|
||||||
server_package_dir = platform.get_package_dir(server_package)
|
|
||||||
server_options.update(
|
|
||||||
dict(
|
|
||||||
cwd=server_package_dir if server_package else None,
|
|
||||||
executable=server_options.get("executable"),
|
|
||||||
arguments=[
|
|
||||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
|
||||||
if server_package_dir
|
|
||||||
else a
|
|
||||||
for a in server_options.get("arguments", [])
|
|
||||||
],
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
|
|
||||||
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
|
|
||||||
result = dict(
|
|
||||||
tool=tool_name,
|
|
||||||
upload_protocol=env_options.get(
|
|
||||||
"upload_protocol", board_config.get("upload", {}).get("protocol")
|
|
||||||
),
|
|
||||||
load_cmds=_cleanup_cmds(
|
|
||||||
env_options.get(
|
|
||||||
"debug_load_cmds",
|
|
||||||
tool_settings.get(
|
|
||||||
"load_cmds",
|
|
||||||
tool_settings.get(
|
|
||||||
"load_cmd", ProjectOptions["env.debug_load_cmds"].default
|
|
||||||
),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
),
|
|
||||||
load_mode=env_options.get(
|
|
||||||
"debug_load_mode",
|
|
||||||
tool_settings.get(
|
|
||||||
"load_mode", ProjectOptions["env.debug_load_mode"].default
|
|
||||||
),
|
|
||||||
),
|
|
||||||
init_break=env_options.get(
|
|
||||||
"debug_init_break",
|
|
||||||
tool_settings.get(
|
|
||||||
"init_break", ProjectOptions["env.debug_init_break"].default
|
|
||||||
),
|
|
||||||
),
|
|
||||||
init_cmds=_cleanup_cmds(
|
|
||||||
env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
|
|
||||||
),
|
|
||||||
extra_cmds=extra_cmds,
|
|
||||||
require_debug_port=tool_settings.get("require_debug_port", False),
|
|
||||||
port=reveal_debug_port(
|
|
||||||
env_options.get("debug_port", tool_settings.get("port")),
|
|
||||||
tool_name,
|
|
||||||
tool_settings,
|
|
||||||
),
|
|
||||||
speed=env_options.get("debug_speed", tool_settings.get("speed")),
|
|
||||||
server=server_options,
|
|
||||||
)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def configure_esp32_load_cmds(debug_options, configuration):
|
|
||||||
"""
|
|
||||||
DEPRECATED: Moved to ESP32 dev-platform
|
|
||||||
See platform.py::configure_debug_options
|
|
||||||
"""
|
|
||||||
flash_images = configuration.get("extra", {}).get("flash_images")
|
|
||||||
ignore_conds = [
|
|
||||||
debug_options["load_cmds"] != ["load"],
|
|
||||||
"xtensa-esp32" not in configuration.get("cc_path", ""),
|
|
||||||
not flash_images,
|
|
||||||
not all(isfile(item["path"]) for item in flash_images),
|
|
||||||
]
|
|
||||||
if any(ignore_conds):
|
|
||||||
return debug_options["load_cmds"]
|
|
||||||
|
|
||||||
mon_cmds = [
|
|
||||||
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
|
|
||||||
path=fs.to_unix_path(item["path"]), offset=item["offset"]
|
|
||||||
)
|
|
||||||
for item in flash_images
|
|
||||||
]
|
|
||||||
mon_cmds.append(
|
|
||||||
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
|
|
||||||
% fs.to_unix_path(configuration["prog_path"][:-4])
|
|
||||||
)
|
|
||||||
return mon_cmds
|
|
||||||
|
|
||||||
|
|
||||||
def has_debug_symbols(prog_path):
|
|
||||||
if not isfile(prog_path):
|
|
||||||
return False
|
|
||||||
matched = {
|
|
||||||
b".debug_info": False,
|
|
||||||
b".debug_abbrev": False,
|
|
||||||
b" -Og": False,
|
|
||||||
b" -g": False,
|
|
||||||
b"__PLATFORMIO_BUILD_DEBUG__": False,
|
|
||||||
}
|
|
||||||
with open(prog_path, "rb") as fp:
|
|
||||||
last_data = b""
|
|
||||||
while True:
|
|
||||||
data = fp.read(1024)
|
|
||||||
if not data:
|
|
||||||
break
|
|
||||||
for pattern, found in matched.items():
|
|
||||||
if found:
|
|
||||||
continue
|
|
||||||
if pattern in last_data + data:
|
|
||||||
matched[pattern] = True
|
|
||||||
last_data = data
|
|
||||||
return all(matched.values())
|
|
||||||
|
|
||||||
|
|
||||||
def is_prog_obsolete(prog_path):
|
|
||||||
prog_hash_path = prog_path + ".sha1"
|
|
||||||
if not isfile(prog_path):
|
|
||||||
return True
|
|
||||||
shasum = sha1()
|
|
||||||
with open(prog_path, "rb") as fp:
|
|
||||||
while True:
|
|
||||||
data = fp.read(1024)
|
|
||||||
if not data:
|
|
||||||
break
|
|
||||||
shasum.update(data)
|
|
||||||
new_digest = shasum.hexdigest()
|
|
||||||
old_digest = None
|
|
||||||
if isfile(prog_hash_path):
|
|
||||||
with open(prog_hash_path) as fp:
|
|
||||||
old_digest = fp.read()
|
|
||||||
if new_digest == old_digest:
|
|
||||||
return False
|
|
||||||
with open(prog_hash_path, "w") as fp:
|
|
||||||
fp.write(new_digest)
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
|
|
||||||
def _get_pattern():
|
|
||||||
if not env_debug_port:
|
|
||||||
return None
|
|
||||||
if set(["*", "?", "[", "]"]) & set(env_debug_port):
|
|
||||||
return env_debug_port
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _is_match_pattern(port):
|
|
||||||
pattern = _get_pattern()
|
|
||||||
if not pattern:
|
|
||||||
return True
|
|
||||||
return fnmatch(port, pattern)
|
|
||||||
|
|
||||||
def _look_for_serial_port(hwids):
|
|
||||||
for item in util.get_serialports(filter_hwid=True):
|
|
||||||
if not _is_match_pattern(item["port"]):
|
|
||||||
continue
|
|
||||||
port = item["port"]
|
|
||||||
if tool_name.startswith("blackmagic"):
|
|
||||||
if (
|
|
||||||
"windows" in util.get_systype()
|
|
||||||
and port.startswith("COM")
|
|
||||||
and len(port) > 4
|
|
||||||
):
|
|
||||||
port = "\\\\.\\%s" % port
|
|
||||||
if "GDB" in item["description"]:
|
|
||||||
return port
|
|
||||||
for hwid in hwids:
|
|
||||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
|
||||||
if hwid_str in item["hwid"]:
|
|
||||||
return port
|
|
||||||
return None
|
|
||||||
|
|
||||||
if env_debug_port and not _get_pattern():
|
|
||||||
return env_debug_port
|
|
||||||
if not tool_settings.get("require_debug_port"):
|
|
||||||
return None
|
|
||||||
|
|
||||||
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
|
|
||||||
if not debug_port:
|
|
||||||
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
|
|
||||||
return debug_port
|
|
@@ -1,161 +0,0 @@
|
|||||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
GDB_DEFAULT_INIT_CONFIG = """
|
|
||||||
define pio_reset_halt_target
|
|
||||||
monitor reset halt
|
|
||||||
end
|
|
||||||
|
|
||||||
define pio_reset_run_target
|
|
||||||
monitor reset
|
|
||||||
end
|
|
||||||
|
|
||||||
target extended-remote $DEBUG_PORT
|
|
||||||
monitor init
|
|
||||||
$LOAD_CMDS
|
|
||||||
pio_reset_halt_target
|
|
||||||
$INIT_BREAK
|
|
||||||
"""
|
|
||||||
|
|
||||||
GDB_STUTIL_INIT_CONFIG = """
|
|
||||||
define pio_reset_halt_target
|
|
||||||
monitor reset
|
|
||||||
monitor halt
|
|
||||||
end
|
|
||||||
|
|
||||||
define pio_reset_run_target
|
|
||||||
monitor reset
|
|
||||||
end
|
|
||||||
|
|
||||||
target extended-remote $DEBUG_PORT
|
|
||||||
$LOAD_CMDS
|
|
||||||
pio_reset_halt_target
|
|
||||||
$INIT_BREAK
|
|
||||||
"""
|
|
||||||
|
|
||||||
GDB_JLINK_INIT_CONFIG = """
|
|
||||||
define pio_reset_halt_target
|
|
||||||
monitor reset
|
|
||||||
monitor halt
|
|
||||||
end
|
|
||||||
|
|
||||||
define pio_reset_run_target
|
|
||||||
monitor clrbp
|
|
||||||
monitor reset
|
|
||||||
monitor go
|
|
||||||
end
|
|
||||||
|
|
||||||
target extended-remote $DEBUG_PORT
|
|
||||||
monitor clrbp
|
|
||||||
monitor speed auto
|
|
||||||
pio_reset_halt_target
|
|
||||||
$LOAD_CMDS
|
|
||||||
$INIT_BREAK
|
|
||||||
"""
|
|
||||||
|
|
||||||
GDB_BLACKMAGIC_INIT_CONFIG = """
|
|
||||||
define pio_reset_halt_target
|
|
||||||
set language c
|
|
||||||
set *0xE000ED0C = 0x05FA0004
|
|
||||||
set $busy = (*0xE000ED0C & 0x4)
|
|
||||||
while ($busy)
|
|
||||||
set $busy = (*0xE000ED0C & 0x4)
|
|
||||||
end
|
|
||||||
set language auto
|
|
||||||
end
|
|
||||||
|
|
||||||
define pio_reset_run_target
|
|
||||||
pio_reset_halt_target
|
|
||||||
end
|
|
||||||
|
|
||||||
target extended-remote $DEBUG_PORT
|
|
||||||
monitor swdp_scan
|
|
||||||
attach 1
|
|
||||||
set mem inaccessible-by-default off
|
|
||||||
$LOAD_CMDS
|
|
||||||
$INIT_BREAK
|
|
||||||
|
|
||||||
set language c
|
|
||||||
set *0xE000ED0C = 0x05FA0004
|
|
||||||
set $busy = (*0xE000ED0C & 0x4)
|
|
||||||
while ($busy)
|
|
||||||
set $busy = (*0xE000ED0C & 0x4)
|
|
||||||
end
|
|
||||||
set language auto
|
|
||||||
"""
|
|
||||||
|
|
||||||
GDB_MSPDEBUG_INIT_CONFIG = """
|
|
||||||
define pio_reset_halt_target
|
|
||||||
end
|
|
||||||
|
|
||||||
define pio_reset_run_target
|
|
||||||
end
|
|
||||||
|
|
||||||
target extended-remote $DEBUG_PORT
|
|
||||||
monitor erase
|
|
||||||
$LOAD_CMDS
|
|
||||||
pio_reset_halt_target
|
|
||||||
$INIT_BREAK
|
|
||||||
"""
|
|
||||||
|
|
||||||
GDB_QEMU_INIT_CONFIG = """
|
|
||||||
define pio_reset_halt_target
|
|
||||||
monitor system_reset
|
|
||||||
end
|
|
||||||
|
|
||||||
define pio_reset_run_target
|
|
||||||
monitor system_reset
|
|
||||||
end
|
|
||||||
|
|
||||||
target extended-remote $DEBUG_PORT
|
|
||||||
$LOAD_CMDS
|
|
||||||
pio_reset_halt_target
|
|
||||||
$INIT_BREAK
|
|
||||||
"""
|
|
||||||
|
|
||||||
GDB_RENODE_INIT_CONFIG = """
|
|
||||||
define pio_reset_halt_target
|
|
||||||
monitor machine Reset
|
|
||||||
$LOAD_CMDS
|
|
||||||
monitor start
|
|
||||||
end
|
|
||||||
|
|
||||||
define pio_reset_run_target
|
|
||||||
pio_reset_halt_target
|
|
||||||
end
|
|
||||||
|
|
||||||
target extended-remote $DEBUG_PORT
|
|
||||||
$LOAD_CMDS
|
|
||||||
$INIT_BREAK
|
|
||||||
monitor start
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
TOOL_TO_CONFIG = {
|
|
||||||
"jlink": GDB_JLINK_INIT_CONFIG,
|
|
||||||
"mspdebug": GDB_MSPDEBUG_INIT_CONFIG,
|
|
||||||
"qemu": GDB_QEMU_INIT_CONFIG,
|
|
||||||
"blackmagic": GDB_BLACKMAGIC_INIT_CONFIG,
|
|
||||||
"renode": GDB_RENODE_INIT_CONFIG,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def get_gdb_init_config(debug_options):
|
|
||||||
tool = debug_options.get("tool")
|
|
||||||
if tool and tool in TOOL_TO_CONFIG:
|
|
||||||
return TOOL_TO_CONFIG[tool]
|
|
||||||
server_exe = (debug_options.get("server") or {}).get("executable", "").lower()
|
|
||||||
if "st-util" in server_exe:
|
|
||||||
return GDB_STUTIL_INIT_CONFIG
|
|
||||||
return GDB_DEFAULT_INIT_CONFIG
|
|
@@ -1,93 +0,0 @@
|
|||||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import signal
|
|
||||||
import time
|
|
||||||
|
|
||||||
import click
|
|
||||||
from twisted.internet import protocol # pylint: disable=import-error
|
|
||||||
|
|
||||||
from platformio import fs
|
|
||||||
from platformio.compat import string_types
|
|
||||||
from platformio.proc import get_pythonexe_path
|
|
||||||
from platformio.project.helpers import get_project_core_dir
|
|
||||||
|
|
||||||
|
|
||||||
class BaseProcess(protocol.ProcessProtocol, object):
|
|
||||||
|
|
||||||
STDOUT_CHUNK_SIZE = 2048
|
|
||||||
LOG_FILE = None
|
|
||||||
|
|
||||||
COMMON_PATTERNS = {
|
|
||||||
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
|
|
||||||
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
|
|
||||||
"PYTHONEXE": get_pythonexe_path(),
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self._last_activity = 0
|
|
||||||
|
|
||||||
def apply_patterns(self, source, patterns=None):
|
|
||||||
_patterns = self.COMMON_PATTERNS.copy()
|
|
||||||
_patterns.update(patterns or {})
|
|
||||||
|
|
||||||
for key, value in _patterns.items():
|
|
||||||
if key.endswith(("_DIR", "_PATH")):
|
|
||||||
_patterns[key] = fs.to_unix_path(value)
|
|
||||||
|
|
||||||
def _replace(text):
|
|
||||||
for key, value in _patterns.items():
|
|
||||||
pattern = "$%s" % key
|
|
||||||
text = text.replace(pattern, value or "")
|
|
||||||
return text
|
|
||||||
|
|
||||||
if isinstance(source, string_types):
|
|
||||||
source = _replace(source)
|
|
||||||
elif isinstance(source, (list, dict)):
|
|
||||||
items = enumerate(source) if isinstance(source, list) else source.items()
|
|
||||||
for key, value in items:
|
|
||||||
if isinstance(value, string_types):
|
|
||||||
source[key] = _replace(value)
|
|
||||||
elif isinstance(value, (list, dict)):
|
|
||||||
source[key] = self.apply_patterns(value, patterns)
|
|
||||||
|
|
||||||
return source
|
|
||||||
|
|
||||||
def onStdInData(self, data):
|
|
||||||
self._last_activity = time.time()
|
|
||||||
if self.LOG_FILE:
|
|
||||||
with open(self.LOG_FILE, "ab") as fp:
|
|
||||||
fp.write(data)
|
|
||||||
|
|
||||||
def outReceived(self, data):
|
|
||||||
self._last_activity = time.time()
|
|
||||||
if self.LOG_FILE:
|
|
||||||
with open(self.LOG_FILE, "ab") as fp:
|
|
||||||
fp.write(data)
|
|
||||||
while data:
|
|
||||||
chunk = data[: self.STDOUT_CHUNK_SIZE]
|
|
||||||
click.echo(chunk, nl=False)
|
|
||||||
data = data[self.STDOUT_CHUNK_SIZE :]
|
|
||||||
|
|
||||||
def errReceived(self, data):
|
|
||||||
self._last_activity = time.time()
|
|
||||||
if self.LOG_FILE:
|
|
||||||
with open(self.LOG_FILE, "ab") as fp:
|
|
||||||
fp.write(data)
|
|
||||||
click.echo(data, nl=False, err=True)
|
|
||||||
|
|
||||||
def processEnded(self, _):
|
|
||||||
self._last_activity = time.time()
|
|
||||||
# Allow terminating via SIGINT/CTRL+C
|
|
||||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
|
@@ -1,280 +0,0 @@
|
|||||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import signal
|
|
||||||
import time
|
|
||||||
from hashlib import sha1
|
|
||||||
from os.path import basename, dirname, isdir, join, realpath, splitext
|
|
||||||
from tempfile import mkdtemp
|
|
||||||
|
|
||||||
from twisted.internet import defer # pylint: disable=import-error
|
|
||||||
from twisted.internet import protocol # pylint: disable=import-error
|
|
||||||
from twisted.internet import reactor # pylint: disable=import-error
|
|
||||||
from twisted.internet import stdio # pylint: disable=import-error
|
|
||||||
from twisted.internet import task # pylint: disable=import-error
|
|
||||||
|
|
||||||
from platformio import fs, proc, telemetry, util
|
|
||||||
from platformio.cache import ContentCache
|
|
||||||
from platformio.commands.debug import helpers
|
|
||||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
|
||||||
from platformio.commands.debug.initcfgs import get_gdb_init_config
|
|
||||||
from platformio.commands.debug.process.base import BaseProcess
|
|
||||||
from platformio.commands.debug.process.server import DebugServer
|
|
||||||
from platformio.compat import hashlib_encode_data, is_bytes
|
|
||||||
from platformio.project.helpers import get_project_cache_dir
|
|
||||||
|
|
||||||
|
|
||||||
class GDBClient(BaseProcess):  # pylint: disable=too-many-instance-attributes

    PIO_SRC_NAME = ".pioinit"
    INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"

    def __init__(self, project_dir, args, debug_options, env_options):
        super(GDBClient, self).__init__()
        self.project_dir = project_dir
        self.args = list(args)
        self.debug_options = debug_options
        self.env_options = env_options

        self._debug_server = DebugServer(debug_options, env_options)
        self._session_id = None

        if not isdir(get_project_cache_dir()):
            os.makedirs(get_project_cache_dir())
        self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(), prefix=".piodebug-")

        self._target_is_run = False
        self._auto_continue_timer = None
        self._errors_buffer = b""

    @defer.inlineCallbacks
    def spawn(self, gdb_path, prog_path):
        session_hash = gdb_path + prog_path
        self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
        self._kill_previous_session()

        patterns = {
            "PROJECT_DIR": self.project_dir,
            "PROG_PATH": prog_path,
            "PROG_DIR": dirname(prog_path),
            "PROG_NAME": basename(splitext(prog_path)[0]),
            "DEBUG_PORT": self.debug_options["port"],
            "UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
            "INIT_BREAK": self.debug_options["init_break"] or "",
            "LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
        }

        yield self._debug_server.spawn(patterns)
        if not patterns["DEBUG_PORT"]:
            patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()

        self.generate_pioinit(self._gdbsrc_dir, patterns)

        # start GDB client
        args = [
            "piogdb",
            "-q",
            "--directory",
            self._gdbsrc_dir,
            "--directory",
            self.project_dir,
            "-l",
            "10",
        ]
        args.extend(self.args)
        if not gdb_path:
            raise DebugInvalidOptionsError("GDB client is not configured")
        gdb_data_dir = self._get_data_dir(gdb_path)
        if gdb_data_dir:
            args.extend(["--data-directory", gdb_data_dir])
        args.append(patterns["PROG_PATH"])

        transport = reactor.spawnProcess(
            self, gdb_path, args, path=self.project_dir, env=os.environ
        )
        defer.returnValue(transport)

    @staticmethod
    def _get_data_dir(gdb_path):
        if "msp430" in gdb_path:
            return None
        gdb_data_dir = realpath(join(dirname(gdb_path), "..", "share", "gdb"))
        return gdb_data_dir if isdir(gdb_data_dir) else None

    def generate_pioinit(self, dst_dir, patterns):
        # default GDB init commands depending on debug tool
        commands = get_gdb_init_config(self.debug_options).split("\n")

        if self.debug_options["init_cmds"]:
            commands = self.debug_options["init_cmds"]
        commands.extend(self.debug_options["extra_cmds"])

        if not any("define pio_reset_run_target" in cmd for cmd in commands):
            commands = [
                "define pio_reset_run_target",
                " echo Warning! Undefined pio_reset_run_target command\\n",
                " monitor reset",
                "end",
            ] + commands
        if not any("define pio_reset_halt_target" in cmd for cmd in commands):
            commands = [
                "define pio_reset_halt_target",
                " echo Warning! Undefined pio_reset_halt_target command\\n",
                " monitor reset halt",
                "end",
            ] + commands
        if not any("define pio_restart_target" in cmd for cmd in commands):
            commands += [
                "define pio_restart_target",
                " pio_reset_halt_target",
                " $INIT_BREAK",
                " %s" % ("continue" if patterns["INIT_BREAK"] else "next"),
                "end",
            ]

        banner = [
            "echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
            "echo PlatformIO: debug_tool = %s\\n" % self.debug_options["tool"],
            "echo PlatformIO: Initializing remote target...\\n",
        ]
        footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
        commands = banner + commands + footer

        with open(join(dst_dir, self.PIO_SRC_NAME), "w") as fp:
            fp.write("\n".join(self.apply_patterns(commands, patterns)))

    def connectionMade(self):
        self._lock_session(self.transport.pid)

        p = protocol.Protocol()
        p.dataReceived = self.onStdInData
        stdio.StandardIO(p)

    def onStdInData(self, data):
        super(GDBClient, self).onStdInData(data)
        if b"-exec-run" in data:
            if self._target_is_run:
                token, _ = data.split(b"-", 1)
                self.outReceived(token + b"^running\n")
                return
            data = data.replace(b"-exec-run", b"-exec-continue")

        if b"-exec-continue" in data:
            self._target_is_run = True
        if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
            # Allow terminating via SIGINT/CTRL+C
            signal.signal(signal.SIGINT, signal.default_int_handler)
            self.transport.write(b"pio_reset_run_target\n")
        self.transport.write(data)

    def processEnded(self, reason):  # pylint: disable=unused-argument
        self._unlock_session()
        if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
            fs.rmtree(self._gdbsrc_dir)
        if self._debug_server:
            self._debug_server.terminate()

        reactor.stop()

    def outReceived(self, data):
        super(GDBClient, self).outReceived(data)
        self._handle_error(data)
        # go to init break automatically
        if self.INIT_COMPLETED_BANNER.encode() in data:
            telemetry.send_event(
                "Debug", "Started", telemetry.dump_run_environment(self.env_options)
            )
            self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
            self._auto_continue_timer.start(0.1)

    def errReceived(self, data):
        super(GDBClient, self).errReceived(data)
        self._handle_error(data)

    def console_log(self, msg):
        if helpers.is_gdbmi_mode():
            msg = helpers.escape_gdbmi_stream("~", msg)
        self.outReceived(msg if is_bytes(msg) else msg.encode())

    def _auto_exec_continue(self):
        auto_exec_delay = 0.5  # in seconds
        if self._last_activity > (time.time() - auto_exec_delay):
            return
        if self._auto_continue_timer:
            self._auto_continue_timer.stop()
        self._auto_continue_timer = None

        if not self.debug_options["init_break"] or self._target_is_run:
            return
        self.console_log(
            "PlatformIO: Resume the execution to `debug_init_break = %s`\n"
            % self.debug_options["init_break"]
        )
        self.console_log(
            "PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
        )
        self.transport.write(
            b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
        )
        self._target_is_run = True

    def _handle_error(self, data):
        self._errors_buffer = (self._errors_buffer + data)[-8192:]  # keep last 8 KBytes
        if not (
            self.PIO_SRC_NAME.encode() in self._errors_buffer
            and b"Error in sourced" in self._errors_buffer
        ):
            return

        last_erros = self._errors_buffer.decode()
        last_erros = " ".join(reversed(last_erros.split("\n")))
        last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)

        err = "%s -> %s" % (
            telemetry.dump_run_environment(self.env_options),
            last_erros,
        )
        telemetry.send_exception("DebugInitError: %s" % err)
        self.transport.loseConnection()

    def _kill_previous_session(self):
        assert self._session_id
        pid = None
        with ContentCache() as cc:
            pid = cc.get(self._session_id)
            cc.delete(self._session_id)
        if not pid:
            return
        if "windows" in util.get_systype():
            kill = ["Taskkill", "/PID", pid, "/F"]
        else:
            kill = ["kill", pid]
        try:
            proc.exec_command(kill)
        except:  # pylint: disable=bare-except
            pass

    def _lock_session(self, pid):
        if not self._session_id:
            return
        with ContentCache() as cc:
            cc.set(self._session_id, str(pid), "1h")

    def _unlock_session(self):
        if not self._session_id:
            return
        with ContentCache() as cc:
            cc.delete(self._session_id)
@@ -1,175 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import time
from os.path import isdir, isfile, join

from twisted.internet import defer  # pylint: disable=import-error
from twisted.internet import reactor  # pylint: disable=import-error

from platformio import fs, util
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.commands.debug.process.base import BaseProcess
from platformio.proc import where_is_program


class DebugServer(BaseProcess):
    def __init__(self, debug_options, env_options):
        super(DebugServer, self).__init__()
        self.debug_options = debug_options
        self.env_options = env_options

        self._debug_port = ":3333"
        self._transport = None
        self._process_ended = False
        self._ready = False

    @defer.inlineCallbacks
    def spawn(self, patterns):  # pylint: disable=too-many-branches
        systype = util.get_systype()
        server = self.debug_options.get("server")
        if not server:
            defer.returnValue(None)
        server = self.apply_patterns(server, patterns)
        server_executable = server["executable"]
        if not server_executable:
            defer.returnValue(None)
        if server["cwd"]:
            server_executable = join(server["cwd"], server_executable)
        if (
            "windows" in systype
            and not server_executable.endswith(".exe")
            and isfile(server_executable + ".exe")
        ):
            server_executable = server_executable + ".exe"

        if not isfile(server_executable):
            server_executable = where_is_program(server_executable)
        if not isfile(server_executable):
            raise DebugInvalidOptionsError(
                "\nCould not launch Debug Server '%s'. Please check that it "
                "is installed and is included in a system PATH\n\n"
                "See documentation or contact contact@platformio.org:\n"
                "https://docs.platformio.org/page/plus/debugging.html\n"
                % server_executable
            )

        openocd_pipe_allowed = all(
            [not self.debug_options["port"], "openocd" in server_executable]
        )
        if openocd_pipe_allowed:
            args = []
            if server["cwd"]:
                args.extend(["-s", server["cwd"]])
            args.extend(
                ["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
            )
            args.extend(server["arguments"])
            str_args = " ".join(
                [arg if arg.startswith("-") else '"%s"' % arg for arg in args]
            )
            self._debug_port = '| "%s" %s' % (server_executable, str_args)
            self._debug_port = fs.to_unix_path(self._debug_port)
            defer.returnValue(self._debug_port)

        env = os.environ.copy()
        # prepend server "lib" folder to LD path
        if (
            "windows" not in systype
            and server["cwd"]
            and isdir(join(server["cwd"], "lib"))
        ):
            ld_key = "DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"
            env[ld_key] = join(server["cwd"], "lib")
            if os.environ.get(ld_key):
                env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
        # prepend BIN to PATH
        if server["cwd"] and isdir(join(server["cwd"], "bin")):
            env["PATH"] = "%s%s%s" % (
                join(server["cwd"], "bin"),
                os.pathsep,
                os.environ.get("PATH", os.environ.get("Path", "")),
            )

        self._transport = reactor.spawnProcess(
            self,
            server_executable,
            [server_executable] + server["arguments"],
            path=server["cwd"],
            env=env,
        )
        if "mspdebug" in server_executable.lower():
            self._debug_port = ":2000"
        elif "jlink" in server_executable.lower():
            self._debug_port = ":2331"
        elif "qemu" in server_executable.lower():
            self._debug_port = ":1234"

        yield self._wait_until_ready()

        defer.returnValue(self._debug_port)

    @defer.inlineCallbacks
    def _wait_until_ready(self):
        ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
        timeout = 60 if ready_pattern else 10
        elapsed = 0
        delay = 0.5
        auto_ready_delay = 0.5
        while not self._ready and not self._process_ended and elapsed < timeout:
            yield self.async_sleep(delay)
            if not ready_pattern:
                self._ready = self._last_activity < (time.time() - auto_ready_delay)
            elapsed += delay

    def _check_ready_by_pattern(self, data):
        if self._ready:
            return self._ready
        ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
        if ready_pattern:
            self._ready = ready_pattern.encode() in data
        return self._ready

    @staticmethod
    def async_sleep(secs):
        d = defer.Deferred()
        reactor.callLater(secs, d.callback, None)
        return d

    def get_debug_port(self):
        return self._debug_port

    def outReceived(self, data):
        super(DebugServer, self).outReceived(
            escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
        )
        self._check_ready_by_pattern(data)

    def errReceived(self, data):
        super(DebugServer, self).errReceived(data)
        self._check_ready_by_pattern(data)

    def processEnded(self, reason):
        self._process_ended = True
        super(DebugServer, self).processEnded(reason)

    def terminate(self):
        if self._process_ended or not self._transport:
            return
        try:
            self._transport.signalProcess("KILL")
        except:  # pylint: disable=bare-except
            pass
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import json
 import os
 import sys
 from fnmatch import fnmatch
@@ -21,7 +22,6 @@ from serial.tools import miniterm

 from platformio import exception, fs, util
 from platformio.commands.device import helpers as device_helpers
-from platformio.compat import dump_json_to_unicode
 from platformio.platform.factory import PlatformFactory
 from platformio.project.exception import NotPlatformIOProjectError

@@ -52,9 +52,7 @@ def device_list(  # pylint: disable=too-many-branches
     single_key = list(data)[0] if len(list(data)) == 1 else None

     if json_output:
-        return click.echo(
-            dump_json_to_unicode(data[single_key] if single_key else data)
-        )
+        return click.echo(json.dumps(data[single_key] if single_key else data))

     titles = {
         "serial": "Serial Ports",
@@ -19,7 +19,7 @@ from platformio.project.config import ProjectConfig

 class DeviceMonitorFilter(miniterm.Transform):
     def __init__(self, options=None):
-        """ Called by PlatformIO to pass context """
+        """Called by PlatformIO to pass context"""
         miniterm.Transform.__init__(self)

         self.options = options or {}
@@ -35,7 +35,7 @@ class DeviceMonitorFilter(miniterm.Transform):
         self.environment = self.config.envs()[0]

     def __call__(self):
-        """ Called by the miniterm library when the filter is actually used """
+        """Called by the miniterm library when the filter is actually used"""
         return self

     @property
@@ -31,6 +31,7 @@ class LogToFile(DeviceMonitorFilter):
             "%y%m%d-%H%M%S"
         )
         print("--- Logging an output to %s" % os.path.abspath(log_file_name))
+        # pylint: disable=consider-using-with
         self._log_fp = io.open(log_file_name, "w", encoding="utf-8")
         return self

@@ -17,7 +17,7 @@ import mimetypes
 import click

 from platformio.commands.home.helpers import is_port_used
-from platformio.compat import ensure_python3
+from platformio.commands.home.run import run_server


 @click.command("home", short_help="GUI to manage PlatformIO")
@@ -48,8 +48,6 @@ from platformio.compat import ensure_python3
     ),
 )
 def cli(port, host, no_open, shutdown_timeout, session_id):
-    ensure_python3()
-
     # Ensure PIO Home mimetypes are known
     mimetypes.add_type("text/html", ".html")
     mimetypes.add_type("text/css", ".css")
@@ -87,9 +85,6 @@ def cli(port, host, no_open, shutdown_timeout, session_id):
         click.launch(home_url)
         return

-    # pylint: disable=import-outside-toplevel
-    from platformio.commands.home.run import run_server
-
     run_server(
         host=host,
         port=port,
@@ -18,7 +18,7 @@ import requests
 from starlette.concurrency import run_in_threadpool

 from platformio import util
-from platformio.compat import WINDOWS
+from platformio.compat import IS_WINDOWS
 from platformio.proc import where_is_program


@@ -37,15 +37,13 @@ def requests_session():

 @util.memoized(expire="60s")
 def get_core_fullpath():
-    return where_is_program(
-        "platformio" + (".exe" if "windows" in util.get_systype() else "")
-    )
+    return where_is_program("platformio" + (".exe" if IS_WINDOWS else ""))


 def is_port_used(host, port):
     socket.setdefaulttimeout(1)
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    if WINDOWS:
+    if IS_WINDOWS:
         try:
             s.bind((host, port))
             s.close()
@@ -16,7 +16,7 @@ import time

 from ajsonrpc.core import JSONRPC20DispatchException

-from platformio.compat import get_running_loop
+from platformio.compat import aio_get_running_loop


 class IDERPC:
@@ -36,7 +36,7 @@ class IDERPC:
     async def listen_commands(self, sid=0):
         if sid not in self._queue:
             self._queue[sid] = []
-        self._queue[sid].append(get_running_loop().create_future())
+        self._queue[sid].append(aio_get_running_loop().create_future())
         return await self._queue[sid][-1]

     def open_project(self, sid, project_dir):
@@ -17,7 +17,7 @@ import time

 from platformio.cache import ContentCache
 from platformio.commands.home.rpc.handlers.os import OSRPC
-from platformio.compat import create_task
+from platformio.compat import aio_create_task


 class MiscRPC:
@@ -30,7 +30,7 @@ class MiscRPC:
         cache_data = json.loads(cache_data)
         # automatically update cache in background every 12 hours
         if cache_data["time"] < (time.time() - (3600 * 12)):
-            create_task(
+            aio_create_task(
                 self._preload_latest_tweets(data_url, cache_key, cache_valid)
             )
         return cache_data["result"]
@@ -200,10 +200,10 @@ class ProjectRPC:
         await PIOCoreRPC.call(
             args, options={"cwd": project_dir, "force_subprocess": True}
         )
-        return self._generate_project_main(project_dir, framework)
+        return self._generate_project_main(project_dir, board, framework)

     @staticmethod
-    def _generate_project_main(project_dir, framework):
+    def _generate_project_main(project_dir, board, framework):
         main_content = None
         if framework == "arduino":
             main_content = "\n".join(
@@ -238,15 +238,30 @@ class ProjectRPC:
             )
         if not main_content:
             return project_dir

+        is_cpp_project = True
+        pm = PlatformPackageManager()
+        try:
+            board = pm.board_config(board)
+            platforms = board.get("platforms", board.get("platform"))
+            if not isinstance(platforms, list):
+                platforms = [platforms]
+            c_based_platforms = ["intel_mcs51", "ststm8"]
+            is_cpp_project = not (set(platforms) & set(c_based_platforms))
+        except exception.PlatformioException:
+            pass
+
         with fs.cd(project_dir):
             config = ProjectConfig()
             src_dir = config.get_optional_dir("src")
-            main_path = os.path.join(src_dir, "main.cpp")
+            main_path = os.path.join(
+                src_dir, "main.%s" % ("cpp" if is_cpp_project else "c")
+            )
             if os.path.isfile(main_path):
                 return project_dir
             if not os.path.isdir(src_dir):
                 os.makedirs(src_dir)
-            with open(main_path, "w") as fp:
+            with open(main_path, mode="w", encoding="utf8") as fp:
                 fp.write(main_content.strip())
             return project_dir

@@ -17,7 +17,7 @@ from ajsonrpc.dispatcher import Dispatcher
 from ajsonrpc.manager import AsyncJSONRPCResponseManager
 from starlette.endpoints import WebSocketEndpoint

-from platformio.compat import create_task, get_running_loop
+from platformio.compat import aio_create_task, aio_get_running_loop
 from platformio.proc import force_exit


@@ -63,7 +63,7 @@ class JSONRPCServerFactoryBase:
             click.echo("Automatically shutdown server on timeout")
             force_exit()

-        self.shutdown_timer = get_running_loop().call_later(
+        self.shutdown_timer = aio_get_running_loop().call_later(
             self.shutdown_timeout, _auto_shutdown_server
         )

@@ -84,7 +84,7 @@ class WebSocketJSONRPCServer(WebSocketEndpoint):
         self.factory.on_client_connect()  # pylint: disable=no-member

     async def on_receive(self, websocket, data):
-        create_task(self._handle_rpc(websocket, data))
+        aio_create_task(self._handle_rpc(websocket, data))

     async def on_disconnect(self, websocket, close_code):
         self.factory.on_client_disconnect()  # pylint: disable=no-member
@@ -32,7 +32,7 @@ from platformio.commands.home.rpc.handlers.os import OSRPC
 from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
 from platformio.commands.home.rpc.handlers.project import ProjectRPC
 from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory
-from platformio.compat import get_running_loop
+from platformio.compat import aio_get_running_loop
 from platformio.exception import PlatformioException
 from platformio.package.manager.core import get_core_package_dir
 from platformio.proc import force_exit
@@ -49,7 +49,7 @@ class ShutdownMiddleware:


 async def shutdown_server(_=None):
-    get_running_loop().call_later(0.5, force_exit)
+    aio_get_running_loop().call_later(0.5, force_exit)
     return PlainTextResponse("Server has been shutdown!")


@@ -14,6 +14,7 @@

 # pylint: disable=too-many-branches, too-many-locals

+import json
 import os
 import time

@@ -23,7 +24,6 @@ from tabulate import tabulate
 from platformio import exception, fs, util
 from platformio.commands import PlatformioCLI
 from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
-from platformio.compat import dump_json_to_unicode
 from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
 from platformio.package.manager.library import LibraryPackageManager
 from platformio.package.meta import PackageItem, PackageSpec
@@ -286,7 +286,7 @@ def lib_update(  # pylint: disable=too-many-arguments

     if json_output:
         return click.echo(
-            dump_json_to_unicode(
+            json.dumps(
                 json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
             )
         )
@@ -315,7 +315,7 @@ def lib_list(ctx, json_output):

     if json_output:
         return click.echo(
-            dump_json_to_unicode(
+            json.dumps(
                 json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
             )
         )
@@ -359,7 +359,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
     )

     if json_output:
-        click.echo(dump_json_to_unicode(result))
+        click.echo(json.dumps(result))
         return

     if result["total"] == 0:
@@ -418,7 +418,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
 def lib_builtin(storage, json_output):
     items = get_builtin_libs(storage)
     if json_output:
-        return click.echo(dump_json_to_unicode(items))
+        return click.echo(json.dumps(items))

     for storage_ in items:
         if not storage_["items"]:
@@ -442,7 +442,7 @@ def lib_show(library, json_output):
     regclient = lm.get_registry_client_instance()
     lib = regclient.fetch_json_data("get", "/v2/lib/info/%d" % lib_id, cache_valid="1h")
     if json_output:
-        return click.echo(dump_json_to_unicode(lib))
+        return click.echo(json.dumps(lib))

     title = "{ownername}/{name}".format(**lib)
     click.secho(title, fg="cyan")
@@ -538,7 +538,7 @@ def lib_stats(json_output):
     result = regclient.fetch_json_data("get", "/v2/lib/stats", cache_valid="1h")

     if json_output:
-        return click.echo(dump_json_to_unicode(result))
+        return click.echo(json.dumps(result))

     for key in ("updated", "added"):
         tabular_data = [
@@ -17,10 +17,14 @@ import tempfile
 from datetime import datetime

 import click
+from tabulate import tabulate

 from platformio import fs
+from platformio.clients.account import AccountClient
 from platformio.clients.registry import RegistryClient
-from platformio.compat import ensure_python3
+from platformio.exception import UserSideException
+from platformio.package.manifest.parser import ManifestParserFactory
+from platformio.package.manifest.schema import ManifestSchema, ManifestValidationError
 from platformio.package.meta import PackageSpec, PackageType
 from platformio.package.pack import PackagePacker
 from platformio.package.unpack import FileUnpacker, TARArchiver
@@ -36,6 +40,54 @@ def validate_datetime(ctx, param, value):  # pylint: disable=unused-argument
     return value


+def load_manifest_from_archive(path):
+    return ManifestSchema().load_manifest(
+        ManifestParserFactory.new_from_archive(path).as_dict()
+    )
+
+
+def check_package_duplicates(
+    owner, type, name, version, system
+):  # pylint: disable=redefined-builtin
+    found = False
+    items = (
+        RegistryClient()
+        .list_packages(filters=dict(types=[type], names=[name]))
+        .get("items")
+    )
+    if not items:
+        return True
+    # duplicated version by owner / system
+    found = False
+    for item in items:
+        if item["owner"]["username"] != owner or item["version"]["name"] != version:
+            continue
+        if not system:
+            found = True
+            break
+        published_systems = []
+        for f in item["version"]["files"]:
+            published_systems.extend(f.get("system", []))
+        found = set(system).issubset(set(published_systems))
+    if found:
+        raise UserSideException(
+            "The package `%s/%s@%s` is already published in the registry"
+            % (owner, name, version)
+        )
+    other_owners = [
+        item["owner"]["username"]
+        for item in items
+        if item["owner"]["username"] != owner
+    ]
+    if other_owners:
+        click.secho(
+            "\nWarning! A package with the name `%s` is already published by the next "
+            "owners: %s\n" % (name, ", ".join(other_owners)),
+            fg="yellow",
+        )
+    return True
+
+
 @click.group("package", short_help="Package manager")
 def cli():
     pass
@@ -54,6 +106,12 @@ def cli():
 def package_pack(package, output):
     p = PackagePacker(package)
     archive_path = p.pack(output)
+    # validate manifest
+    try:
+        load_manifest_from_archive(archive_path)
+    except ManifestValidationError as e:
+        os.remove(archive_path)
+        raise e
     click.secho('Wrote a tarball to "%s"' % archive_path, fg="green")


@@ -80,26 +138,63 @@ def package_pack(package, output):
     default=True,
     help="Notify by email when package is processed",
 )
-def package_publish(package, owner, released_at, private, notify):
-    assert ensure_python3()
-
-    # publish .tar.gz instantly without repacking
-    if not os.path.isdir(package) and isinstance(
+@click.option(
+    "--non-interactive",
+    is_flag=True,
+    help="Do not show interactive prompt",
+)
+def package_publish(  # pylint: disable=too-many-arguments, too-many-locals
+    package, owner, released_at, private, notify, non_interactive
+):
+    click.secho("Preparing a package...", fg="cyan")
+    owner = owner or AccountClient().get_logged_username()
+    do_not_pack = not os.path.isdir(package) and isinstance(
         FileUnpacker.new_archiver(package), TARArchiver
-    ):
-        response = RegistryClient().publish_package(
-            package, owner, released_at, private, notify
-        )
-        click.secho(response.get("message"), fg="green")
-        return
+    )
+    archive_path = None

     with tempfile.TemporaryDirectory() as tmp_dir:  # pylint: disable=no-member
-        with fs.cd(tmp_dir):
-            p = PackagePacker(package)
-            archive_path = p.pack()
-        response = RegistryClient().publish_package(
-            archive_path, owner, released_at, private, notify
-        )
-        os.remove(archive_path)
+        # publish .tar.gz instantly without repacking
+        if do_not_pack:
+            archive_path = package
+        else:
+            with fs.cd(tmp_dir):
+                p = PackagePacker(package)
+                archive_path = p.pack()
+
+        type_ = PackageType.from_archive(archive_path)
+        manifest = load_manifest_from_archive(archive_path)
+        name = manifest.get("name")
+        version = manifest.get("version")
+        data = [
+            ("Type:", type_),
+            ("Owner:", owner),
+            ("Name:", name),
+            ("Version:", version),
+        ]
+        if manifest.get("system"):
+            data.insert(len(data) - 1, ("System:", ", ".join(manifest.get("system"))))
+        click.echo(tabulate(data, tablefmt="plain"))
+
+        # look for duplicates
+        check_package_duplicates(owner, type_, name, version, manifest.get("system"))
+
+        if not non_interactive:
+            click.confirm(
+                "Are you sure you want to publish the %s %s to the registry?\n"
+                % (
+                    type_,
+                    click.style(
+                        "%s/%s@%s" % (owner, name, version),
+                        fg="cyan",
+                    ),
+                ),
+                abort=True,
+            )
+
+        response = RegistryClient().publish_package(
+            owner, type_, archive_path, released_at, private, notify
+        )
+        if not do_not_pack:
+            os.remove(archive_path)
         click.secho(response.get("message"), fg="green")

@@ -122,9 +217,9 @@ def package_publish(package, owner, released_at, private, notify):
 def package_unpublish(package, type, undo):  # pylint: disable=redefined-builtin
     spec = PackageSpec(package)
     response = RegistryClient().unpublish_package(
+        owner=spec.owner or AccountClient().get_logged_username(),
         type=type,
         name=spec.name,
-        owner=spec.owner,
         version=str(spec.requirements),
         undo=undo,
     )
@@ -12,13 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import json
 import os

 import click

 from platformio.cache import cleanup_content_cache
 from platformio.commands.boards import print_boards
-from platformio.compat import dump_json_to_unicode
 from platformio.package.manager.platform import PlatformPackageManager
 from platformio.package.meta import PackageItem, PackageSpec
 from platformio.package.version import get_original_version
@@ -31,6 +31,301 @@ def cli():
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("search", short_help="Search for development platform")
|
||||||
|
@click.argument("query", required=False)
|
||||||
|
@click.option("--json-output", is_flag=True)
|
||||||
|
def platform_search(query, json_output):
|
||||||
|
platforms = []
|
||||||
|
for platform in _get_registry_platforms():
|
||||||
|
if query == "all":
|
||||||
|
query = ""
|
||||||
|
search_data = json.dumps(platform)
|
||||||
|
if query and query.lower() not in search_data.lower():
|
||||||
|
continue
|
||||||
|
platforms.append(
|
||||||
|
_get_registry_platform_data(
|
||||||
|
platform["name"], with_boards=False, expose_packages=False
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if json_output:
|
||||||
|
click.echo(json.dumps(platforms))
|
||||||
|
else:
|
||||||
|
_print_platforms(platforms)
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
||||||
|
@click.argument("query", required=False)
|
||||||
|
@click.option("--json-output", is_flag=True)
|
||||||
|
def platform_frameworks(query, json_output):
|
||||||
|
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||||
|
frameworks = []
|
||||||
|
for framework in regclient.fetch_json_data(
|
||||||
|
"get", "/v2/frameworks", cache_valid="1d"
|
||||||
|
):
|
||||||
|
if query == "all":
|
||||||
|
query = ""
|
||||||
|
search_data = json.dumps(framework)
|
||||||
|
if query and query.lower() not in search_data.lower():
|
||||||
|
continue
|
||||||
|
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
|
||||||
|
framework["platforms"] = [
|
||||||
|
platform["name"]
|
||||||
|
for platform in _get_registry_platforms()
|
||||||
|
if framework["name"] in platform["frameworks"]
|
||||||
|
]
|
||||||
|
frameworks.append(framework)
|
||||||
|
|
||||||
|
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
|
||||||
|
if json_output:
|
||||||
|
click.echo(json.dumps(frameworks))
|
||||||
|
else:
|
||||||
|
_print_platforms(frameworks)
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("list", short_help="List installed development platforms")
|
||||||
|
@click.option("--json-output", is_flag=True)
|
||||||
|
def platform_list(json_output):
|
||||||
|
platforms = []
|
||||||
|
pm = PlatformPackageManager()
|
||||||
|
for pkg in pm.get_installed():
|
||||||
|
platforms.append(
|
||||||
|
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
|
||||||
|
)
|
||||||
|
|
||||||
|
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
|
||||||
|
if json_output:
|
||||||
|
click.echo(json.dumps(platforms))
|
||||||
|
else:
|
||||||
|
_print_platforms(platforms)
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("show", short_help="Show details about development platform")
|
||||||
|
@click.argument("platform")
|
||||||
|
@click.option("--json-output", is_flag=True)
|
||||||
|
def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||||
|
data = _get_platform_data(platform)
|
||||||
|
if not data:
|
||||||
|
raise UnknownPlatform(platform)
|
||||||
|
if json_output:
|
||||||
|
return click.echo(json.dumps(data))
|
||||||
|
|
||||||
|
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
|
||||||
|
click.echo(
|
||||||
|
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
|
||||||
|
)
|
||||||
|
click.echo("=" * (3 + len(dep + data["title"])))
|
||||||
|
click.echo(data["description"])
|
||||||
|
click.echo()
|
||||||
|
if "version" in data:
|
||||||
|
click.echo("Version: %s" % data["version"])
|
||||||
|
if data["homepage"]:
|
||||||
|
click.echo("Home: %s" % data["homepage"])
|
||||||
|
if data["repository"]:
|
||||||
|
click.echo("Repository: %s" % data["repository"])
|
||||||
|
if data["url"]:
|
||||||
|
click.echo("Vendor: %s" % data["url"])
|
||||||
|
if data["license"]:
|
||||||
|
click.echo("License: %s" % data["license"])
|
||||||
|
if data["frameworks"]:
|
||||||
|
click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))
|
||||||
|
|
||||||
|
if not data["packages"]:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not isinstance(data["packages"][0], dict):
|
||||||
|
click.echo("Packages: %s" % ", ".join(data["packages"]))
|
||||||
|
else:
|
||||||
|
click.echo()
|
||||||
|
click.secho("Packages", bold=True)
|
||||||
|
click.echo("--------")
|
||||||
|
for item in data["packages"]:
|
||||||
|
click.echo()
|
||||||
|
click.echo("Package %s" % click.style(item["name"], fg="yellow"))
|
||||||
|
click.echo("-" * (8 + len(item["name"])))
|
||||||
|
if item["type"]:
|
||||||
|
click.echo("Type: %s" % item["type"])
|
||||||
|
click.echo("Requirements: %s" % item["requirements"])
|
||||||
|
click.echo(
|
||||||
|
"Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
|
||||||
|
)
|
||||||
|
if "version" in item:
|
||||||
|
click.echo("Version: %s" % item["version"])
|
||||||
|
if "originalVersion" in item:
|
||||||
|
click.echo("Original version: %s" % item["originalVersion"])
|
||||||
|
if "description" in item:
|
||||||
|
click.echo("Description: %s" % item["description"])
|
||||||
|
|
||||||
|
if data["boards"]:
|
||||||
|
click.echo()
|
||||||
|
click.secho("Boards", bold=True)
|
||||||
|
click.echo("------")
|
||||||
|
print_boards(data["boards"])
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("install", short_help="Install new development platform")
|
||||||
|
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||||
|
@click.option("--with-package", multiple=True)
|
||||||
|
@click.option("--without-package", multiple=True)
|
||||||
|
@click.option("--skip-default-package", is_flag=True)
|
||||||
|
@click.option("--with-all-packages", is_flag=True)
|
||||||
|
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||||
|
@click.option(
|
||||||
|
"-f",
|
||||||
|
"--force",
|
||||||
|
is_flag=True,
|
||||||
|
help="Reinstall/redownload dev/platform and its packages if exist",
|
||||||
|
)
|
||||||
|
def platform_install( # pylint: disable=too-many-arguments
|
||||||
|
platforms,
|
||||||
|
with_package,
|
||||||
|
without_package,
|
||||||
|
skip_default_package,
|
||||||
|
with_all_packages,
|
||||||
|
silent,
|
||||||
|
force,
|
||||||
|
):
|
||||||
|
return _platform_install(
|
||||||
|
platforms,
|
||||||
|
with_package,
|
||||||
|
without_package,
|
||||||
|
skip_default_package,
|
||||||
|
with_all_packages,
|
||||||
|
silent,
|
||||||
|
force,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _platform_install( # pylint: disable=too-many-arguments
|
||||||
|
platforms,
|
||||||
|
with_package=None,
|
||||||
|
without_package=None,
|
||||||
|
skip_default_package=False,
|
||||||
|
with_all_packages=False,
|
||||||
|
silent=False,
|
||||||
|
force=False,
|
||||||
|
):
|
||||||
|
pm = PlatformPackageManager()
|
||||||
|
for platform in platforms:
|
||||||
|
pkg = pm.install(
|
||||||
|
spec=platform,
|
||||||
|
with_packages=with_package or [],
|
||||||
|
without_packages=without_package or [],
|
||||||
|
skip_default_package=skip_default_package,
|
||||||
|
with_all_packages=with_all_packages,
|
||||||
|
silent=silent,
|
||||||
|
force=force,
|
||||||
|
)
|
||||||
|
if pkg and not silent:
|
||||||
|
click.secho(
|
||||||
|
"The platform '%s' has been successfully installed!\n"
|
||||||
|
"The rest of the packages will be installed later "
|
||||||
|
"depending on your build environment." % platform,
|
||||||
|
fg="green",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("uninstall", short_help="Uninstall development platform")
|
||||||
|
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||||
|
def platform_uninstall(platforms):
|
||||||
|
pm = PlatformPackageManager()
|
||||||
|
for platform in platforms:
|
||||||
|
if pm.uninstall(platform):
|
||||||
|
click.secho(
|
||||||
|
"The platform '%s' has been successfully removed!" % platform,
|
||||||
|
fg="green",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command("update", short_help="Update installed development platforms")
|
||||||
|
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
|
||||||
|
@click.option(
|
||||||
|
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"-c",
|
||||||
|
"--only-check",
|
||||||
|
is_flag=True,
|
||||||
|
help="DEPRECATED. Please use `--dry-run` instead",
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||||
|
)
|
||||||
|
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||||
|
@click.option("--json-output", is_flag=True)
|
||||||
|
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
|
||||||
|
platforms, only_packages, only_check, dry_run, silent, json_output
|
||||||
|
):
|
||||||
|
pm = PlatformPackageManager()
|
||||||
|
platforms = platforms or pm.get_installed()
|
||||||
|
only_check = dry_run or only_check
|
||||||
|
|
||||||
|
if only_check and json_output:
|
||||||
|
result = []
|
||||||
|
for platform in platforms:
|
||||||
|
spec = None
|
||||||
|
pkg = None
|
||||||
|
if isinstance(platform, PackageItem):
|
||||||
|
pkg = platform
|
||||||
|
else:
|
||||||
|
spec = PackageSpec(platform)
|
||||||
|
pkg = pm.get_package(spec)
|
||||||
|
if not pkg:
|
||||||
|
continue
|
||||||
|
outdated = pm.outdated(pkg, spec)
|
||||||
|
if (
|
||||||
|
not outdated.is_outdated(allow_incompatible=True)
|
||||||
|
and not PlatformFactory.new(pkg).are_outdated_packages()
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
data = _get_installed_platform_data(
|
||||||
|
pkg, with_boards=False, expose_packages=False
|
||||||
|
)
|
||||||
|
if outdated.is_outdated(allow_incompatible=True):
|
||||||
|
data["versionLatest"] = (
|
||||||
|
str(outdated.latest) if outdated.latest else None
|
||||||
|
)
|
||||||
|
result.append(data)
|
||||||
|
return click.echo(json.dumps(result))
|
||||||
|
|
||||||
|
# cleanup cached board and platform lists
|
||||||
|
cleanup_content_cache("http")
|
||||||
|
|
||||||
|
for platform in platforms:
|
||||||
|
click.echo(
|
||||||
|
"Platform %s"
|
||||||
|
% click.style(
|
||||||
|
platform.metadata.name
|
||||||
|
if isinstance(platform, PackageItem)
|
||||||
|
else platform,
|
||||||
|
fg="cyan",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
click.echo("--------")
|
||||||
|
pm.update(
|
||||||
|
platform, only_packages=only_packages, only_check=only_check, silent=silent
|
||||||
|
)
|
||||||
|
click.echo()
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
#
|
||||||
|
# Helpers
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
def init_platform(name, skip_default_package=True, auto_install=True):
|
||||||
|
try:
|
||||||
|
return PlatformFactory.new(name)
|
||||||
|
except UnknownPlatform:
|
||||||
|
if auto_install:
|
||||||
|
_platform_install([name], skip_default_package=skip_default_package)
|
||||||
|
return PlatformFactory.new(name)
|
||||||
|
|
||||||
|
|
||||||
def _print_platforms(platforms):
|
def _print_platforms(platforms):
|
||||||
for platform in platforms:
|
for platform in platforms:
|
||||||
click.echo(
|
click.echo(
|
||||||
@@ -162,264 +457,3 @@ def _get_registry_platform_data( # pylint: disable=unused-argument
|
|||||||
]
|
]
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
@cli.command("search", short_help="Search for development platform")
|
|
||||||
@click.argument("query", required=False)
|
|
||||||
@click.option("--json-output", is_flag=True)
|
|
||||||
def platform_search(query, json_output):
|
|
||||||
platforms = []
|
|
||||||
for platform in _get_registry_platforms():
|
|
||||||
if query == "all":
|
|
||||||
query = ""
|
|
||||||
search_data = dump_json_to_unicode(platform)
|
|
||||||
if query and query.lower() not in search_data.lower():
|
|
||||||
continue
|
|
||||||
platforms.append(
|
|
||||||
_get_registry_platform_data(
|
|
||||||
platform["name"], with_boards=False, expose_packages=False
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if json_output:
|
|
||||||
click.echo(dump_json_to_unicode(platforms))
|
|
||||||
else:
|
|
||||||
_print_platforms(platforms)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
|
||||||
@click.argument("query", required=False)
|
|
||||||
@click.option("--json-output", is_flag=True)
|
|
||||||
def platform_frameworks(query, json_output):
|
|
||||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
|
||||||
frameworks = []
|
|
||||||
for framework in regclient.fetch_json_data(
|
|
||||||
"get", "/v2/frameworks", cache_valid="1d"
|
|
||||||
):
|
|
||||||
if query == "all":
|
|
||||||
query = ""
|
|
||||||
search_data = dump_json_to_unicode(framework)
|
|
||||||
if query and query.lower() not in search_data.lower():
|
|
||||||
continue
|
|
||||||
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
|
|
||||||
framework["platforms"] = [
|
|
||||||
platform["name"]
|
|
||||||
for platform in _get_registry_platforms()
|
|
||||||
if framework["name"] in platform["frameworks"]
|
|
||||||
]
|
|
||||||
frameworks.append(framework)
|
|
||||||
|
|
||||||
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
|
|
||||||
if json_output:
|
|
||||||
click.echo(dump_json_to_unicode(frameworks))
|
|
||||||
else:
|
|
||||||
_print_platforms(frameworks)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command("list", short_help="List installed development platforms")
|
|
||||||
@click.option("--json-output", is_flag=True)
|
|
||||||
def platform_list(json_output):
|
|
||||||
platforms = []
|
|
||||||
pm = PlatformPackageManager()
|
|
||||||
for pkg in pm.get_installed():
|
|
||||||
platforms.append(
|
|
||||||
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
|
|
||||||
)
|
|
||||||
|
|
||||||
    platforms = sorted(platforms, key=lambda manifest: manifest["name"])
    if json_output:
        click.echo(dump_json_to_unicode(platforms))
    else:
        _print_platforms(platforms)


@cli.command("show", short_help="Show details about development platform")
@click.argument("platform")
@click.option("--json-output", is_flag=True)
def platform_show(platform, json_output):  # pylint: disable=too-many-branches
    data = _get_platform_data(platform)
    if not data:
        raise UnknownPlatform(platform)
    if json_output:
        return click.echo(dump_json_to_unicode(data))

    dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
    click.echo(
        "{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
    )
    click.echo("=" * (3 + len(dep + data["title"])))
    click.echo(data["description"])
    click.echo()
    if "version" in data:
        click.echo("Version: %s" % data["version"])
    if data["homepage"]:
        click.echo("Home: %s" % data["homepage"])
    if data["repository"]:
        click.echo("Repository: %s" % data["repository"])
    if data["url"]:
        click.echo("Vendor: %s" % data["url"])
    if data["license"]:
        click.echo("License: %s" % data["license"])
    if data["frameworks"]:
        click.echo("Frameworks: %s" % ", ".join(data["frameworks"]))

    if not data["packages"]:
        return None

    if not isinstance(data["packages"][0], dict):
        click.echo("Packages: %s" % ", ".join(data["packages"]))
    else:
        click.echo()
        click.secho("Packages", bold=True)
        click.echo("--------")
        for item in data["packages"]:
            click.echo()
            click.echo("Package %s" % click.style(item["name"], fg="yellow"))
            click.echo("-" * (8 + len(item["name"])))
            if item["type"]:
                click.echo("Type: %s" % item["type"])
            click.echo("Requirements: %s" % item["requirements"])
            click.echo(
                "Installed: %s" % ("Yes" if item.get("version") else "No (optional)")
            )
            if "version" in item:
                click.echo("Version: %s" % item["version"])
            if "originalVersion" in item:
                click.echo("Original version: %s" % item["originalVersion"])
            if "description" in item:
                click.echo("Description: %s" % item["description"])

    if data["boards"]:
        click.echo()
        click.secho("Boards", bold=True)
        click.echo("------")
        print_boards(data["boards"])

    return True


@cli.command("install", short_help="Install new development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
@click.option("--with-package", multiple=True)
@click.option("--without-package", multiple=True)
@click.option("--skip-default-package", is_flag=True)
@click.option("--with-all-packages", is_flag=True)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
    "-f",
    "--force",
    is_flag=True,
    help="Reinstall/redownload dev/platform and its packages if exist",
)
def platform_install(  # pylint: disable=too-many-arguments
    platforms,
    with_package,
    without_package,
    skip_default_package,
    with_all_packages,
    silent,
    force,
):
    pm = PlatformPackageManager()
    for platform in platforms:
        pkg = pm.install(
            spec=platform,
            with_packages=with_package,
            without_packages=without_package,
            skip_default_package=skip_default_package,
            with_all_packages=with_all_packages,
            silent=silent,
            force=force,
        )
        if pkg and not silent:
            click.secho(
                "The platform '%s' has been successfully installed!\n"
                "The rest of the packages will be installed later "
                "depending on your build environment." % platform,
                fg="green",
            )


@cli.command("uninstall", short_help="Uninstall development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
def platform_uninstall(platforms):
    pm = PlatformPackageManager()
    for platform in platforms:
        if pm.uninstall(platform):
            click.secho(
                "The platform '%s' has been successfully removed!" % platform,
                fg="green",
            )


@cli.command("update", short_help="Update installed development platforms")
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
@click.option(
    "-p", "--only-packages", is_flag=True, help="Update only the platform packages"
)
@click.option(
    "-c",
    "--only-check",
    is_flag=True,
    help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
    "--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
def platform_update(  # pylint: disable=too-many-locals, too-many-arguments
    platforms, only_packages, only_check, dry_run, silent, json_output
):
    pm = PlatformPackageManager()
    platforms = platforms or pm.get_installed()
    only_check = dry_run or only_check

    if only_check and json_output:
        result = []
        for platform in platforms:
            spec = None
            pkg = None
            if isinstance(platform, PackageItem):
                pkg = platform
            else:
                spec = PackageSpec(platform)
                pkg = pm.get_package(spec)
            if not pkg:
                continue
            outdated = pm.outdated(pkg, spec)
            if (
                not outdated.is_outdated(allow_incompatible=True)
                and not PlatformFactory.new(pkg).are_outdated_packages()
            ):
                continue
            data = _get_installed_platform_data(
                pkg, with_boards=False, expose_packages=False
            )
            if outdated.is_outdated(allow_incompatible=True):
                data["versionLatest"] = (
                    str(outdated.latest) if outdated.latest else None
                )
            result.append(data)
        return click.echo(dump_json_to_unicode(result))

    # cleanup cached board and platform lists
    cleanup_content_cache("http")

    for platform in platforms:
        click.echo(
            "Platform %s"
            % click.style(
                platform.metadata.name
                if isinstance(platform, PackageItem)
                else platform,
                fg="cyan",
            )
        )
        click.echo("--------")
        pm.update(
            platform, only_packages=only_packages, only_check=only_check, silent=silent
        )
        click.echo()

    return True
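Note: a minimal sketch (not part of the diff) of exercising the Click commands above in isolation with Click's test runner; `cli` is the command group defined earlier in this file, and the platform name is only an example.

# Editorial sketch, assuming Click's public CliRunner API.
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(cli, ["show", "atmelavr", "--json-output"])
print(result.exit_code)    # 0 on success
print(result.output[:80])  # JSON produced by dump_json_to_unicode()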
@@ -240,7 +240,7 @@ def init_base_project(project_dir):


 def init_include_readme(include_dir):
-    with open(os.path.join(include_dir, "README"), "w") as fp:
+    with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp:
         fp.write(
             """
 This directory is intended for project header files.
@@ -286,7 +286,7 @@ https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html


 def init_lib_readme(lib_dir):
-    with open(os.path.join(lib_dir, "README"), "w") as fp:
+    with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp:
         fp.write(
             """
 This directory is intended for project specific (private) libraries.
@@ -339,7 +339,7 @@ More information about PlatformIO Library Dependency Finder


 def init_test_readme(test_dir):
-    with open(os.path.join(test_dir, "README"), "w") as fp:
+    with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp:
         fp.write(
             """
 This directory is intended for PlatformIO Unit Testing and project tests.
@@ -360,7 +360,7 @@ def init_cvs_ignore(project_dir):
     conf_path = os.path.join(project_dir, ".gitignore")
     if os.path.isfile(conf_path):
         return
-    with open(conf_path, "w") as fp:
+    with open(conf_path, mode="w", encoding="utf8") as fp:
         fp.write(".pio\n")
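Note: these hunks (and many below) make the same change: text files are opened with an explicit encoding="utf8" instead of relying on the locale's preferred encoding. A minimal illustration (not part of the diff), with hypothetical file names:

# Editorial sketch: on a system whose locale encoding is not UTF-8
# (e.g. cp1252 on Windows), the first write can raise UnicodeEncodeError,
# while the second is deterministic on every platform.
text = "µController README"

with open("README.implicit", "w") as fp:                     # locale-dependent
    fp.write(text)

with open("README.utf8", mode="w", encoding="utf8") as fp:   # explicit, portable
    fp.write(text)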
@@ -173,7 +173,11 @@ class DeviceMonitorClient(  # pylint: disable=too-many-instance-attributes
         address = port.getHost()
         self.log.debug("Serial Bridge is started on {address!r}", address=address)
         if "sock" in self.cmd_options:
-            with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
+            with open(
+                os.path.join(self.cmd_options["sock"], "sock"),
+                mode="w",
+                encoding="utf8",
+            ) as fp:
                 fp.write("socket://localhost:%d" % address.port)

     def client_terminal_stopped(self):
@@ -28,7 +28,6 @@ from platformio.commands.device import helpers as device_helpers
 from platformio.commands.device.command import device_monitor as cmd_device_monitor
 from platformio.commands.run.command import cli as cmd_run
 from platformio.commands.test.command import cli as cmd_test
-from platformio.compat import ensure_python3
 from platformio.package.manager.core import inject_contrib_pysite
 from platformio.project.exception import NotPlatformIOProjectError

@@ -37,7 +36,6 @@ from platformio.project.exception import NotPlatformIOProjectError
 @click.option("-a", "--agent", multiple=True)
 @click.pass_context
 def cli(ctx, agent):
-    assert ensure_python3()
     ctx.obj = agent
     inject_contrib_pysite(verify_openssl=True)

@@ -352,7 +350,7 @@ def device_monitor(ctx, agents, **kwargs):
             sleep(0.1)
         if not t.is_alive():
             return
-        with open(sock_file) as fp:
+        with open(sock_file, encoding="utf8") as fp:
             kwargs["port"] = fp.read()
         ctx.invoke(cmd_device_monitor, **kwargs)
         t.join(2)
@@ -14,6 +14,7 @@

 import operator
 import os
+import shutil
 from multiprocessing import cpu_count
 from time import time

@@ -200,7 +201,7 @@ def print_processing_header(env, config, verbose=False):
         "Processing %s (%s)"
         % (click.style(env, fg="cyan", bold=True), "; ".join(env_dump))
     )
-    terminal_width, _ = click.get_terminal_size()
+    terminal_width, _ = shutil.get_terminal_size()
     click.secho("-" * terminal_width, bold=True)
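Note: click.get_terminal_size() is deprecated in newer Click releases; the stdlib equivalent used above returns the same (columns, lines) tuple. A minimal sketch (not part of the diff):

# Editorial sketch of the stdlib replacement.
import shutil

terminal_width, _ = shutil.get_terminal_size(fallback=(80, 24))
print("-" * terminal_width)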
@@ -54,11 +54,11 @@ def clean_build_dir(build_dir, config):
     if isdir(build_dir):
         # check project structure
         if isfile(checksum_file):
-            with open(checksum_file) as fp:
+            with open(checksum_file, encoding="utf8") as fp:
                 if fp.read() == checksum:
                     return
         fs.rmtree(build_dir)

     makedirs(build_dir)
-    with open(checksum_file, "w") as fp:
+    with open(checksum_file, mode="w", encoding="utf8") as fp:
         fp.write(checksum)
@@ -12,10 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from platformio.commands.platform import platform_install as cmd_platform_install
+from platformio.commands.platform import init_platform
 from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
-from platformio.platform.exception import UnknownPlatform
-from platformio.platform.factory import PlatformFactory
 from platformio.project.exception import UndefinedEnvPlatformError

 # pylint: disable=too-many-instance-attributes
@@ -66,15 +64,7 @@ class EnvironmentProcessor(object):
         if "monitor" in build_targets:
             build_targets.remove("monitor")

-        try:
-            p = PlatformFactory.new(self.options["platform"])
-        except UnknownPlatform:
-            self.cmd_ctx.invoke(
-                cmd_platform_install,
-                platforms=[self.options["platform"]],
-                skip_default_package=True,
-            )
-            p = PlatformFactory.new(self.options["platform"])
-
-        result = p.run(build_vars, build_targets, self.silent, self.verbose, self.jobs)
+        result = init_platform(self.options["platform"]).run(
+            build_vars, build_targets, self.silent, self.verbose, self.jobs
+        )
         return result["returncode"] == 0
@@ -69,7 +69,7 @@ def system_info(json_output):
     data["platformio_exe"] = {
         "title": "PlatformIO Core Executable",
         "value": proc.where_is_program(
-            "platformio.exe" if proc.WINDOWS else "platformio"
+            "platformio.exe" if compat.IS_WINDOWS else "platformio"
         ),
     }
     data["python_exe"] = {
@@ -42,7 +42,7 @@ def is_completion_code_installed(shell, path):

     import click_completion  # pylint: disable=import-error,import-outside-toplevel

-    with open(path) as fp:
+    with open(path, encoding="utf8") as fp:
         return click_completion.get_code(shell=shell) in fp.read()


@@ -64,7 +64,7 @@ def uninstall_completion_code(shell, path):

     import click_completion  # pylint: disable=import-error,import-outside-toplevel

-    with open(path, "r+") as fp:
+    with open(path, "r+", encoding="utf8") as fp:
         contents = fp.read()
         fp.seek(0)
         fp.truncate()
@@ -14,18 +14,19 @@

 # pylint: disable=too-many-arguments, too-many-locals, too-many-branches

-from fnmatch import fnmatch
-from os import getcwd, listdir
-from os.path import isdir, join
+import fnmatch
+import os
+import shutil
 from time import time

 import click
 from tabulate import tabulate

 from platformio import app, exception, fs, util
+from platformio.commands.platform import init_platform
 from platformio.commands.test.embedded import EmbeddedTestProcessor
+from platformio.commands.test.helpers import get_test_names
 from platformio.commands.test.native import NativeTestProcessor
-from platformio.platform.factory import PlatformFactory
 from platformio.project.config import ProjectConfig


@@ -50,7 +51,7 @@ from platformio.project.config import ProjectConfig
 @click.option(
     "-d",
     "--project-dir",
-    default=getcwd,
+    default=os.getcwd,
     type=click.Path(
         exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
     ),
@@ -102,11 +103,7 @@ def cli(  # pylint: disable=redefined-builtin
     with fs.cd(project_dir):
         config = ProjectConfig.get_instance(project_conf)
         config.validate(envs=environment)
-
-        test_dir = config.get_optional_dir("test")
-        if not isdir(test_dir):
-            raise exception.TestDirNotExists(test_dir)
-        test_names = get_test_names(test_dir)
+        test_names = get_test_names(config)

         if not verbose:
             click.echo("Verbose mode can be enabled via `-v, --verbose` option")
@@ -129,9 +126,11 @@ def cli(  # pylint: disable=redefined-builtin
             not environment and default_envs and envname not in default_envs,
             testname != "*"
             and patterns["filter"]
-            and not any(fnmatch(testname, p) for p in patterns["filter"]),
+            and not any(
+                fnmatch.fnmatch(testname, p) for p in patterns["filter"]
+            ),
             testname != "*"
-            and any(fnmatch(testname, p) for p in patterns["ignore"]),
+            and any(fnmatch.fnmatch(testname, p) for p in patterns["ignore"]),
         ]
         if any(skip_conditions):
             results.append({"env": envname, "test": testname})
@@ -142,7 +141,8 @@ def cli(  # pylint: disable=redefined-builtin

         cls = (
             EmbeddedTestProcessor
-            if is_embedded_platform(config.get(section, "platform"))
+            if config.get(section, "platform")
+            and init_platform(config.get(section, "platform")).is_embedded()
             else NativeTestProcessor
         )
         tp = cls(
@@ -185,22 +185,6 @@ def cli(  # pylint: disable=redefined-builtin
         raise exception.ReturnErrorCode(1)


-def get_test_names(test_dir):
-    names = []
-    for item in sorted(listdir(test_dir)):
-        if isdir(join(test_dir, item)):
-            names.append(item)
-    if not names:
-        names = ["*"]
-    return names
-
-
-def is_embedded_platform(name):
-    if not name:
-        return False
-    return PlatformFactory.new(name).is_embedded()
-
-
 def print_processing_header(test, env):
     click.echo(
         "Processing %s in %s environment"
@@ -209,7 +193,7 @@ def print_processing_header(test, env):
             click.style(env, fg="cyan", bold=True),
         )
     )
-    terminal_width, _ = click.get_terminal_size()
+    terminal_width, _ = shutil.get_terminal_size()
     click.secho("-" * terminal_width, bold=True)
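Note: the test runner now calls fnmatch.fnmatch() through the module to match test names against the --filter/--ignore patterns. A standalone sketch of the same matching logic (not part of the diff, names are illustrative):

# Editorial sketch of the pattern matching behind the skip conditions above.
import fnmatch

test_names = ["test_main", "test_driver", "native_only"]
filter_patterns = ["test_*"]
ignore_patterns = ["*driver*"]

selected = [
    name
    for name in test_names
    if any(fnmatch.fnmatch(name, p) for p in filter_patterns)
    and not any(fnmatch.fnmatch(name, p) for p in ignore_patterns)
]
print(selected)  # ['test_main']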
@@ -117,13 +117,10 @@ class EmbeddedTestProcessor(TestProcessorBase):
                 port = item["port"]
                 for hwid in board_hwids:
                     hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
-                    if hwid_str in item["hwid"]:
+                    if hwid_str in item["hwid"] and self.is_serial_port_ready(port):
                         return port

-        # check if port is already configured
-        try:
-            serial.Serial(port, timeout=self.SERIAL_TIMEOUT).close()
-        except serial.SerialException:
+        if port and not self.is_serial_port_ready(port):
             port = None

         if not port:
@@ -136,3 +133,18 @@ class EmbeddedTestProcessor(TestProcessorBase):
                 "global `--test-port` option."
             )
         return port
+
+    @staticmethod
+    def is_serial_port_ready(port, timeout=3):
+        if not port:
+            return False
+        elapsed = 0
+        while elapsed < timeout:
+            try:
+                serial.Serial(port, timeout=1).close()
+                return True
+            except:  # pylint: disable=bare-except
+                pass
+            sleep(1)
+            elapsed += 1
+        return False
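Note: the new is_serial_port_ready() helper polls the port for up to `timeout` seconds before giving up. A standalone sketch of the same retry idea (not part of the diff; it assumes pyserial is installed and catches only SerialException, whereas the method above uses a bare except):

# Editorial sketch: waiting for a serial port to become available.
import time
import serial  # pyserial

def wait_for_port(port, timeout=3):
    for _ in range(timeout):
        try:
            serial.Serial(port, timeout=1).close()
            return True
        except serial.SerialException:
            time.sleep(1)
    return False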
30
platformio/commands/test/helpers.py
Normal file
@@ -0,0 +1,30 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

from platformio import exception


def get_test_names(config):
    test_dir = config.get_optional_dir("test")
    if not os.path.isdir(test_dir):
        raise exception.TestDirNotExists(test_dir)
    names = []
    for item in sorted(os.listdir(test_dir)):
        if os.path.isdir(os.path.join(test_dir, item)):
            names.append(item)
    if not names:
        names = ["*"]
    return names
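Note: a minimal sketch (not part of the diff) of calling the new helper from a project directory; ProjectConfig.get_instance() is the accessor used elsewhere in this changeset, and the printed names are only examples.

# Editorial sketch: listing test names the way `pio test` now does.
from platformio.commands.test.helpers import get_test_names
from platformio.project.config import ProjectConfig

config = ProjectConfig.get_instance("platformio.ini")
print(get_test_names(config))  # e.g. ['test_desktop', 'test_embedded'] or ['*']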
@@ -139,9 +139,9 @@ class TestProcessorBase(object):
             cmd_run,
             project_dir=self.options["project_dir"],
             project_conf=self.options["project_config"].path,
-            upload_port=self.options["upload_port"],
+            upload_port=self.options.get("upload_port"),
             verbose=self.options["verbose"],
-            silent=self.options["silent"],
+            silent=self.options.get("silent"),
             environment=[self.env_name],
             disable_auto_clean="nobuild" in target,
             target=target,
@@ -224,7 +224,7 @@ class TestProcessorBase(object):
             test_dir,
             "%s.%s" % (tmp_file_prefix, transport_options.get("language", "c")),
         )
-        with open(tmp_file, "w") as fp:
+        with open(tmp_file, mode="w", encoding="utf8") as fp:
             fp.write(data)

         atexit.register(delete_tmptest_files, test_dir)
@@ -21,7 +21,7 @@ import click

 from platformio import VERSION, __version__, app, exception
 from platformio.clients.http import fetch_remote_content
-from platformio.compat import WINDOWS
+from platformio.compat import IS_WINDOWS
 from platformio.proc import exec_command, get_pythonexe_path
 from platformio.project.helpers import get_project_cache_dir

@@ -40,7 +40,7 @@ def cli(dev):

     to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
     cmds = (
-        ["pip", "install", "--upgrade", get_pip_package(to_develop)],
+        ["pip", "install", "--upgrade", download_dist_package(to_develop)],
         ["platformio", "--version"],
     )

@@ -73,7 +73,7 @@ def cli(dev):
             if not r:
                 raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
             permission_errors = ("permission denied", "not permitted")
-            if any(m in r["err"].lower() for m in permission_errors) and not WINDOWS:
+            if any(m in r["err"].lower() for m in permission_errors) and not IS_WINDOWS:
                 click.secho(
                     """
-----------------
@@ -94,7 +94,7 @@ WARNING! Don't use `sudo` for the rest PlatformIO commands.
     return True


-def get_pip_package(to_develop):
+def download_dist_package(to_develop):
     if not to_develop:
         return "platformio"
     dl_url = "https://github.com/platformio/platformio-core/archive/develop.zip"
@@ -103,7 +103,7 @@ def get_pip_package(to_develop):
         os.makedirs(cache_dir)
     pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
     try:
-        with open(pkg_name, "w") as fp:
+        with open(pkg_name, "wb") as fp:
             r = exec_command(
                 ["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True
             )
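Note: the development archive streamed from curl is binary data, which is why the cache file is now opened with "wb" instead of "w". A minimal stdlib-only sketch of the same pattern (not part of the diff; the local file name is illustrative):

# Editorial sketch: piping a subprocess's binary stdout into a file.
import subprocess

dl_url = "https://github.com/platformio/platformio-core/archive/develop.zip"
with open("develop.zip", "wb") as fp:
    subprocess.run(["curl", "-fsSL", dl_url], stdout=fp, check=False)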
@@ -12,23 +12,57 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-# pylint: disable=unused-import, no-name-in-module, import-error,
-# pylint: disable=no-member, undefined-variable, unexpected-keyword-arg
+# pylint: disable=unused-import,no-name-in-module

-import glob
 import inspect
-import json
 import locale
-import os
-import re
 import sys

 from platformio.exception import UserSideException

+if sys.version_info >= (3, 7):
+    from asyncio import create_task as aio_create_task
+    from asyncio import get_running_loop as aio_get_running_loop
+else:
+    from asyncio import ensure_future as aio_create_task
+    from asyncio import get_event_loop as aio_get_running_loop
+
 PY2 = sys.version_info[0] == 2
-CYGWIN = sys.platform.startswith("cygwin")
-WINDOWS = sys.platform.startswith("win")
-MACOS = sys.platform.startswith("darwin")
+IS_CYGWIN = sys.platform.startswith("cygwin")
+IS_WINDOWS = WINDOWS = sys.platform.startswith("win")
+IS_MACOS = sys.platform.startswith("darwin")
+string_types = (str,)
+
+
+def is_bytes(x):
+    return isinstance(x, (bytes, memoryview, bytearray))
+
+
+def ci_strings_are_equal(a, b):
+    if a == b:
+        return True
+    if not a or not b:
+        return False
+    return a.strip().lower() == b.strip().lower()
+
+
+def hashlib_encode_data(data):
+    if is_bytes(data):
+        return data
+    if not isinstance(data, string_types):
+        data = str(data)
+    return data.encode()
+
+
+def load_python_module(name, pathname):
+    import importlib.util  # pylint: disable=import-outside-toplevel
+
+    spec = importlib.util.spec_from_file_location(name, pathname)
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
+    return module
+
+
 def get_filesystem_encoding():
@@ -53,14 +87,6 @@ def get_object_members(obj, ignore_private=True):
     }


-def ci_strings_are_equal(a, b):
-    if a == b:
-        return True
-    if not a or not b:
-        return False
-    return a.strip().lower() == b.strip().lower()
-
-
 def ensure_python3(raise_exception=True):
     compatible = sys.version_info >= (3, 6)
     if not raise_exception or compatible:
@@ -73,99 +99,9 @@ def ensure_python3(raise_exception=True):
     )


-if PY2:
-    import imp
-
-    string_types = (str, unicode)
-
-    def create_task(coro, name=None):
-        raise NotImplementedError
-
-    def get_running_loop():
-        raise NotImplementedError
-
-    def is_bytes(x):
-        return isinstance(x, (buffer, bytearray))
-
-    def path_to_unicode(path):
-        if isinstance(path, unicode):
-            return path
-        return path.decode(get_filesystem_encoding())
-
-    def hashlib_encode_data(data):
-        if is_bytes(data):
-            return data
-        if isinstance(data, unicode):
-            data = data.encode(get_filesystem_encoding())
-        elif not isinstance(data, string_types):
-            data = str(data)
-        return data
-
-    def dump_json_to_unicode(obj):
-        if isinstance(obj, unicode):
-            return obj
-        return json.dumps(
-            obj, encoding=get_filesystem_encoding(), ensure_ascii=False
-        ).encode("utf8")
-
-    _magic_check = re.compile("([*?[])")
-    _magic_check_bytes = re.compile(b"([*?[])")
-
-    def glob_recursive(pathname):
-        return glob.glob(pathname)
-
-    def glob_escape(pathname):
-        """Escape all special characters."""
-        # https://github.com/python/cpython/blob/master/Lib/glob.py#L161
-        # Escaping is done by wrapping any of "*?[" between square brackets.
-        # Metacharacters do not work in the drive part and shouldn't be
-        # escaped.
-        drive, pathname = os.path.splitdrive(pathname)
-        if isinstance(pathname, bytes):
-            pathname = _magic_check_bytes.sub(br"[\1]", pathname)
-        else:
-            pathname = _magic_check.sub(r"[\1]", pathname)
-        return drive + pathname
-
-    def load_python_module(name, pathname):
-        return imp.load_source(name, pathname)
-
-
-else:
-    import importlib.util
-    from glob import escape as glob_escape
-
-    if sys.version_info >= (3, 7):
-        from asyncio import create_task, get_running_loop
-    else:
-        from asyncio import ensure_future as create_task
-        from asyncio import get_event_loop as get_running_loop
-
-    string_types = (str,)
-
-    def is_bytes(x):
-        return isinstance(x, (bytes, memoryview, bytearray))
-
-    def path_to_unicode(path):
-        return path
-
-    def hashlib_encode_data(data):
-        if is_bytes(data):
-            return data
-        if not isinstance(data, string_types):
-            data = str(data)
-        return data.encode()
-
-    def dump_json_to_unicode(obj):
-        if isinstance(obj, string_types):
-            return obj
-        return json.dumps(obj)
-
-    def glob_recursive(pathname):
-        return glob.glob(pathname, recursive=True)
-
-    def load_python_module(name, pathname):
-        spec = importlib.util.spec_from_file_location(name, pathname)
-        module = importlib.util.module_from_spec(spec)
-        spec.loader.exec_module(module)
-        return module
+def path_to_unicode(path):
+    """
+    Deprecated: Compatibility with dev-platforms,
+    and custom device monitor filters
+    """
+    return path
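Note: load_python_module() replaces the old imp.load_source() path. A minimal sketch (not part of the diff) of the equivalent importlib calls used to load a script by path; the script name is illustrative:

# Editorial sketch: loading a module from a file path, as compat.load_python_module() does.
import importlib.util

spec = importlib.util.spec_from_file_location("extra_script", "extra_script.py")
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
print(module.__name__)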
246
platformio/debug/config/base.py
Normal file
@@ -0,0 +1,246 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os

from platformio import fs, proc, util
from platformio.compat import string_types
from platformio.debug.exception import DebugInvalidOptionsError
from platformio.debug.helpers import reveal_debug_port
from platformio.project.config import ProjectConfig
from platformio.project.helpers import get_project_core_dir, load_project_ide_data
from platformio.project.options import ProjectOptions


class DebugConfigBase:  # pylint: disable=too-many-instance-attributes
    def __init__(self, platform, project_config, env_name):
        self.platform = platform
        self.project_config = project_config
        self.env_name = env_name
        self.env_options = project_config.items(env=env_name, as_dict=True)
        self.build_data = self._load_build_data()

        self.tool_name = None
        self.board_config = {}
        self.tool_settings = {}
        if "board" in self.env_options:
            self.board_config = platform.board_config(self.env_options["board"])
            self.tool_name = self.board_config.get_debug_tool_name(
                self.env_options.get("debug_tool")
            )
            self.tool_settings = (
                self.board_config.get("debug", {})
                .get("tools", {})
                .get(self.tool_name, {})
            )

        self._load_cmds = None
        self._port = None

        self.server = self._configure_server()

        try:
            platform.configure_debug_session(self)
        except NotImplementedError:
            pass

    @staticmethod
    def cleanup_cmds(items):
        items = ProjectConfig.parse_multi_values(items)
        return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]

    @property
    def program_path(self):
        return self.build_data["prog_path"]

    @property
    def client_executable_path(self):
        return self.build_data["gdb_path"]

    @property
    def load_cmds(self):
        if self._load_cmds is not None:
            return self._load_cmds
        result = self.env_options.get("debug_load_cmds")
        if not result:
            result = self.tool_settings.get("load_cmds")
        if not result:
            # legacy
            result = self.tool_settings.get("load_cmd")
        if not result:
            result = ProjectOptions["env.debug_load_cmds"].default
        return self.cleanup_cmds(result)

    @load_cmds.setter
    def load_cmds(self, cmds):
        self._load_cmds = cmds

    @property
    def load_mode(self):
        result = self.env_options.get("debug_load_mode")
        if not result:
            result = self.tool_settings.get("load_mode")
        return result or ProjectOptions["env.debug_load_mode"].default

    @property
    def init_break(self):
        missed = object()
        result = self.env_options.get("debug_init_break", missed)
        if result != missed:
            return result
        result = None
        if not result:
            result = self.tool_settings.get("init_break")
        return result or ProjectOptions["env.debug_init_break"].default

    @property
    def init_cmds(self):
        return self.cleanup_cmds(
            self.env_options.get("debug_init_cmds", self.tool_settings.get("init_cmds"))
        )

    @property
    def extra_cmds(self):
        return self.cleanup_cmds(
            self.env_options.get("debug_extra_cmds")
        ) + self.cleanup_cmds(self.tool_settings.get("extra_cmds"))

    @property
    def port(self):
        return reveal_debug_port(
            self.env_options.get("debug_port", self.tool_settings.get("port"))
            or self._port,
            self.tool_name,
            self.tool_settings,
        )

    @port.setter
    def port(self, value):
        self._port = value

    @property
    def upload_protocol(self):
        return self.env_options.get(
            "upload_protocol", self.board_config.get("upload", {}).get("protocol")
        )

    @property
    def speed(self):
        return self.env_options.get("debug_speed", self.tool_settings.get("speed"))

    @property
    def server_ready_pattern(self):
        return self.env_options.get(
            "debug_server_ready_pattern", (self.server or {}).get("ready_pattern")
        )

    def _load_build_data(self):
        data = load_project_ide_data(os.getcwd(), self.env_name, cache=True)
        if data:
            return data
        raise DebugInvalidOptionsError("Could not load a build configuration")

    def _configure_server(self):
        result = None
        # specific server per a system
        if isinstance(self.tool_settings.get("server", {}), list):
            for item in self.tool_settings["server"][:]:
                self.tool_settings["server"] = item
                if util.get_systype() in item.get("system", []):
                    break

        # user overwrites debug server
        if self.env_options.get("debug_server"):
            result = {
                "cwd": None,
                "executable": None,
                "arguments": self.env_options.get("debug_server"),
            }
            result["executable"] = result["arguments"][0]
            result["arguments"] = result["arguments"][1:]
        elif "server" in self.tool_settings:
            result = self.tool_settings["server"]
            server_package = result.get("package")
            server_package_dir = (
                self.platform.get_package_dir(server_package)
                if server_package
                else None
            )
            if server_package and not server_package_dir:
                self.platform.install_packages(
                    with_packages=[server_package],
                    skip_default_package=True,
                    silent=True,
                )
                server_package_dir = self.platform.get_package_dir(server_package)
            result.update(
                dict(
                    cwd=server_package_dir if server_package else None,
                    executable=result.get("executable"),
                    arguments=[
                        a.replace("$PACKAGE_DIR", server_package_dir)
                        if server_package_dir
                        else a
                        for a in result.get("arguments", [])
                    ],
                )
            )
        return self.reveal_patterns(result) if result else None

    def get_init_script(self, debugger):
        try:
            return getattr(self, "%s_INIT_SCRIPT" % debugger.upper())
        except AttributeError:
            raise NotImplementedError

    def reveal_patterns(self, source, recursive=True):
        program_path = self.program_path or ""
        patterns = {
            "PLATFORMIO_CORE_DIR": get_project_core_dir(),
            "PYTHONEXE": proc.get_pythonexe_path(),
            "PROJECT_DIR": self.project_config.path,
            "PROG_PATH": program_path,
            "PROG_DIR": os.path.dirname(program_path),
            "PROG_NAME": os.path.basename(os.path.splitext(program_path)[0]),
            "DEBUG_PORT": self.port,
            "UPLOAD_PROTOCOL": self.upload_protocol,
            "INIT_BREAK": self.init_break or "",
            "LOAD_CMDS": "\n".join(self.load_cmds or []),
        }
        for key, value in patterns.items():
            if key.endswith(("_DIR", "_PATH")):
                patterns[key] = fs.to_unix_path(value)

        def _replace(text):
            for key, value in patterns.items():
                pattern = "$%s" % key
                text = text.replace(pattern, value or "")
            return text

        if isinstance(source, string_types):
            source = _replace(source)
        elif isinstance(source, (list, dict)):
            items = enumerate(source) if isinstance(source, list) else source.items()
            for key, value in items:
                if isinstance(value, string_types):
                    source[key] = _replace(value)
                elif isinstance(value, (list, dict)) and recursive:
                    source[key] = self.reveal_patterns(value, patterns)

        data = json.dumps(source)
        if any(("$" + key) in data for key in patterns):
            source = self.reveal_patterns(source, patterns)

        return source
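Note: reveal_patterns() substitutes placeholders such as $PROG_PATH and $DEBUG_PORT into server arguments and GDB init scripts. A reduced sketch of the substitution idea (not part of the diff; values are illustrative):

# Editorial sketch of the $PATTERN substitution performed by reveal_patterns().
patterns = {"PROG_PATH": ".pio/build/debug/firmware.elf", "DEBUG_PORT": ":3333"}

def replace(text):
    for key, value in patterns.items():
        text = text.replace("$" + key, value or "")
    return text

print(replace("target extended-remote $DEBUG_PORT"))
# -> target extended-remote :3333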
49
platformio/debug/config/blackmagic.py
Normal file
@@ -0,0 +1,49 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.debug.config.base import DebugConfigBase


class BlackmagicDebugConfig(DebugConfigBase):

    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
    set language c
    set *0xE000ED0C = 0x05FA0004
    set $busy = (*0xE000ED0C & 0x4)
    while ($busy)
        set $busy = (*0xE000ED0C & 0x4)
    end
    set language auto
end

define pio_reset_run_target
    pio_reset_halt_target
end

target extended-remote $DEBUG_PORT
monitor swdp_scan
attach 1
set mem inaccessible-by-default off
$LOAD_CMDS
$INIT_BREAK

set language c
set *0xE000ED0C = 0x05FA0004
set $busy = (*0xE000ED0C & 0x4)
while ($busy)
    set $busy = (*0xE000ED0C & 0x4)
end
set language auto
"""
48
platformio/debug/config/factory.py
Normal file
@@ -0,0 +1,48 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib
import re

from platformio.debug.config.generic import GenericDebugConfig
from platformio.debug.config.native import NativeDebugConfig


class DebugConfigFactory(object):
    @staticmethod
    def get_clsname(name):
        name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I)
        return "%s%sDebugConfig" % (name.upper()[0], name.lower()[1:])

    @classmethod
    def new(cls, platform, project_config, env_name):
        board_config = platform.board_config(
            project_config.get("env:" + env_name, "board")
        )
        tool_name = (
            board_config.get_debug_tool_name(
                project_config.get("env:" + env_name, "debug_tool")
            )
            if board_config
            else None
        )
        config_cls = None
        try:
            mod = importlib.import_module("platformio.debug.config.%s" % tool_name)
            config_cls = getattr(mod, cls.get_clsname(tool_name))
        except ModuleNotFoundError:
            config_cls = (
                GenericDebugConfig if platform.is_embedded() else NativeDebugConfig
            )
        return config_cls(platform, project_config, env_name)
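Note: the factory derives the config class name from the configured debug tool and falls back to the Generic/Native configs when no tool-specific module exists. A minimal sketch of the name mapping using the same regex (not part of the diff):

# Editorial sketch: how tool names map to class names in DebugConfigFactory.
import re

def get_clsname(name):
    name = re.sub(r"[^\da-z\_\-]+", "", name, flags=re.I)
    return "%s%sDebugConfig" % (name.upper()[0], name.lower()[1:])

print(get_clsname("jlink"))       # JlinkDebugConfig
print(get_clsname("blackmagic"))  # BlackmagicDebugConfig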
38
platformio/debug/config/generic.py
Normal file
@@ -0,0 +1,38 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.debug.config.base import DebugConfigBase


class GenericDebugConfig(DebugConfigBase):

    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
    monitor reset halt
end

define pio_reset_run_target
    monitor reset
end

target extended-remote $DEBUG_PORT
monitor init
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""

    def __init__(self, *args, **kwargs):
        super(GenericDebugConfig, self).__init__(*args, **kwargs)
        self.port = ":3333"
48
platformio/debug/config/jlink.py
Normal file
@@ -0,0 +1,48 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.debug.config.base import DebugConfigBase


class JlinkDebugConfig(DebugConfigBase):

    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
    monitor reset
    monitor halt
end

define pio_reset_run_target
    monitor clrbp
    monitor reset
    monitor go
end

target extended-remote $DEBUG_PORT
monitor clrbp
monitor speed auto
pio_reset_halt_target
$LOAD_CMDS
$INIT_BREAK
"""

    def __init__(self, *args, **kwargs):
        super(JlinkDebugConfig, self).__init__(*args, **kwargs)
        self.port = ":2331"

    @property
    def server_ready_pattern(self):
        return super(JlinkDebugConfig, self).server_ready_pattern or (
            "Waiting for GDB connection"
        )
36
platformio/debug/config/mspdebug.py
Normal file
@@ -0,0 +1,36 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.debug.config.base import DebugConfigBase


class MspdebugDebugConfig(DebugConfigBase):

    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end

define pio_reset_run_target
end

target remote $DEBUG_PORT
monitor erase
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""

    def __init__(self, *args, **kwargs):
        super(MspdebugDebugConfig, self).__init__(*args, **kwargs)
        self.port = ":2000"
34
platformio/debug/config/native.py
Normal file
@@ -0,0 +1,34 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.compat import IS_WINDOWS
from platformio.debug.config.base import DebugConfigBase


class NativeDebugConfig(DebugConfigBase):

    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end

define pio_reset_run_target
end

define pio_restart_target
end

$INIT_BREAK
""" + (
        "set startup-with-shell off" if not IS_WINDOWS else ""
    )
37
platformio/debug/config/qemu.py
Normal file
@@ -0,0 +1,37 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.debug.config.base import DebugConfigBase


class QemuDebugConfig(DebugConfigBase):

    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
    monitor system_reset
end

define pio_reset_run_target
    monitor system_reset
end

target extended-remote $DEBUG_PORT
$LOAD_CMDS
pio_reset_halt_target
$INIT_BREAK
"""

    def __init__(self, *args, **kwargs):
        super(QemuDebugConfig, self).__init__(*args, **kwargs)
        self.port = ":1234"
45
platformio/debug/config/renode.py
Normal file
@@ -0,0 +1,45 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.debug.config.base import DebugConfigBase


class RenodeDebugConfig(DebugConfigBase):

    GDB_INIT_SCRIPT = """
define pio_reset_halt_target
    monitor machine Reset
    $LOAD_CMDS
    monitor start
end

define pio_reset_run_target
    pio_reset_halt_target
end

target extended-remote $DEBUG_PORT
$LOAD_CMDS
$INIT_BREAK
monitor start
"""

    def __init__(self, *args, **kwargs):
        super(RenodeDebugConfig, self).__init__(*args, **kwargs)
        self.port = ":3333"

    @property
    def server_ready_pattern(self):
        return super(RenodeDebugConfig, self).server_ready_pattern or (
            "GDB server with all CPUs started on port"
        )

204
platformio/debug/helpers.py
Normal file
@@ -0,0 +1,204 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
import sys
import time
from fnmatch import fnmatch
from hashlib import sha1
from io import BytesIO
from os.path import isfile

from platformio import util
from platformio.commands import PlatformioCLI
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.run.command import print_processing_header
from platformio.commands.test.helpers import get_test_names
from platformio.commands.test.processor import TestProcessorBase
from platformio.compat import IS_WINDOWS, is_bytes
from platformio.debug.exception import DebugInvalidOptionsError


class GDBMIConsoleStream(BytesIO):  # pylint: disable=too-few-public-methods

    STDOUT = sys.stdout

    def write(self, text):
        self.STDOUT.write(escape_gdbmi_stream("~", text))
        self.STDOUT.flush()


def is_gdbmi_mode():
    return "--interpreter" in " ".join(PlatformioCLI.leftover_args)


def escape_gdbmi_stream(prefix, stream):
    bytes_stream = False
    if is_bytes(stream):
        bytes_stream = True
        stream = stream.decode()

    if not stream:
        return b"" if bytes_stream else ""

    ends_nl = stream.endswith("\n")
    stream = re.sub(r"\\+", "\\\\\\\\", stream)
    stream = stream.replace('"', '\\"')
    stream = stream.replace("\n", "\\n")
    stream = '%s"%s"' % (prefix, stream)
    if ends_nl:
        stream += "\n"

    return stream.encode() if bytes_stream else stream


def get_default_debug_env(config):
    default_envs = config.default_envs()
    all_envs = config.envs()
    for env in default_envs:
        if config.get("env:" + env, "build_type") == "debug":
            return env
    for env in all_envs:
        if config.get("env:" + env, "build_type") == "debug":
            return env
    return default_envs[0] if default_envs else all_envs[0]


def predebug_project(
    ctx, project_dir, project_config, env_name, preload, verbose
):  # pylint: disable=too-many-arguments
    debug_testname = project_config.get("env:" + env_name, "debug_test")
    if debug_testname:
        test_names = get_test_names(project_config)
        if debug_testname not in test_names:
            raise DebugInvalidOptionsError(
                "Unknown test name `%s`. Valid names are `%s`"
                % (debug_testname, ", ".join(test_names))
            )
        print_processing_header(env_name, project_config, verbose)
        tp = TestProcessorBase(
            ctx,
            debug_testname,
            env_name,
            dict(
                project_config=project_config,
                project_dir=project_dir,
                without_building=False,
                without_uploading=True,
                without_testing=True,
                verbose=False,
            ),
        )
        tp.build_or_upload(["__debug", "__test"] + (["upload"] if preload else []))
    else:
        ctx.invoke(
            cmd_run,
            project_dir=project_dir,
            project_conf=project_config.path,
            environment=[env_name],
            target=["__debug"] + (["upload"] if preload else []),
            verbose=verbose,
        )

    if preload:
        time.sleep(5)


def has_debug_symbols(prog_path):
    if not isfile(prog_path):
        return False
    matched = {
        b".debug_info": False,
        b".debug_abbrev": False,
        b" -Og": False,
        b" -g": False,
        # b"__PLATFORMIO_BUILD_DEBUG__": False,
    }
    with open(prog_path, "rb") as fp:
        last_data = b""
        while True:
            data = fp.read(1024)
            if not data:
                break
            for pattern, found in matched.items():
                if found:
                    continue
                if pattern in last_data + data:
                    matched[pattern] = True
            last_data = data
    return all(matched.values())


def is_prog_obsolete(prog_path):
    prog_hash_path = prog_path + ".sha1"
    if not isfile(prog_path):
        return True
    shasum = sha1()
    with open(prog_path, "rb") as fp:
        while True:
            data = fp.read(1024)
            if not data:
                break
            shasum.update(data)
    new_digest = shasum.hexdigest()
    old_digest = None
    if isfile(prog_hash_path):
        with open(prog_hash_path, encoding="utf8") as fp:
            old_digest = fp.read()
    if new_digest == old_digest:
        return False
    with open(prog_hash_path, mode="w", encoding="utf8") as fp:
        fp.write(new_digest)
    return True


def reveal_debug_port(env_debug_port, tool_name, tool_settings):
    def _get_pattern():
        if not env_debug_port:
            return None
        if set(["*", "?", "[", "]"]) & set(env_debug_port):
            return env_debug_port
        return None

    def _is_match_pattern(port):
        pattern = _get_pattern()
        if not pattern:
            return True
        return fnmatch(port, pattern)

    def _look_for_serial_port(hwids):
        for item in util.get_serialports(filter_hwid=True):
            if not _is_match_pattern(item["port"]):
                continue
            port = item["port"]
            if tool_name.startswith("blackmagic"):
                if IS_WINDOWS and port.startswith("COM") and len(port) > 4:
                    port = "\\\\.\\%s" % port
                if "GDB" in item["description"]:
                    return port
            for hwid in hwids:
                hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
                if hwid_str in item["hwid"]:
                    return port
        return None

    if env_debug_port and not _get_pattern():
        return env_debug_port
    if not tool_settings.get("require_debug_port"):
        return None

    debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
    if not debug_port:
        raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
    return debug_port
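A side note, not from this diff: escape_gdbmi_stream() wraps raw tool output into a GDB/MI stream record so that machine-interface front-ends can render it. A small usage sketch; the firmware name is made up:

# Console text becomes a "~" (console) stream record; quotes, backslashes and
# newlines inside the payload are escaped, and a trailing newline is preserved.
print(escape_gdbmi_stream("~", 'Reading symbols from "firmware.elf"\n'))
# -> ~"Reading symbols from \"firmware.elf\"\n"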

13
platformio/debug/process/__init__.py
Normal file
@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

155
platformio/debug/process/base.py
Normal file
@@ -0,0 +1,155 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import asyncio
import signal
import subprocess
import sys
import time

from platformio.compat import (
    IS_WINDOWS,
    aio_create_task,
    aio_get_running_loop,
    get_locale_encoding,
)


class DebugSubprocessProtocol(asyncio.SubprocessProtocol):
    def __init__(self, factory):
        self.factory = factory
        self._is_exited = False

    def connection_made(self, transport):
        self.factory.connection_made(transport)

    def pipe_data_received(self, fd, data):
        pipe_to_cb = [
            self.factory.stdin_data_received,
            self.factory.stdout_data_received,
            self.factory.stderr_data_received,
        ]
        pipe_to_cb[fd](data)

    def connection_lost(self, exc):
        self.process_exited()

    def process_exited(self):
        if self._is_exited:
            return
        self.factory.process_exited()
        self._is_exited = True


class DebugBaseProcess:

    STDOUT_CHUNK_SIZE = 2048
    LOG_FILE = None

    def __init__(self):
        self.transport = None
        self._is_running = False
        self._last_activity = 0
        self._exit_future = None
        self._stdin_read_task = None
        self._std_encoding = get_locale_encoding()

    async def spawn(self, *args, **kwargs):
        wait_until_exit = False
        if "wait_until_exit" in kwargs:
            wait_until_exit = kwargs["wait_until_exit"]
            del kwargs["wait_until_exit"]
        for pipe in ("stdin", "stdout", "stderr"):
            if pipe not in kwargs:
                kwargs[pipe] = subprocess.PIPE
        loop = aio_get_running_loop()
        await loop.subprocess_exec(
            lambda: DebugSubprocessProtocol(self), *args, **kwargs
        )
        if wait_until_exit:
            self._exit_future = loop.create_future()
            await self._exit_future

    def is_running(self):
        return self._is_running

    def connection_made(self, transport):
        self._is_running = True
        self.transport = transport

    def connect_stdin_pipe(self):
        self._stdin_read_task = aio_create_task(self._read_stdin_pipe())

    async def _read_stdin_pipe(self):
        loop = aio_get_running_loop()
        if IS_WINDOWS:
            while True:
                self.stdin_data_received(
                    await loop.run_in_executor(None, sys.stdin.buffer.readline)
                )
        else:
            reader = asyncio.StreamReader()
            protocol = asyncio.StreamReaderProtocol(reader)
            await loop.connect_read_pipe(lambda: protocol, sys.stdin)
            while True:
                self.stdin_data_received(await reader.readline())

    def stdin_data_received(self, data):
        self._last_activity = time.time()
        if self.LOG_FILE:
            with open(self.LOG_FILE, "ab") as fp:
                fp.write(data)

    def stdout_data_received(self, data):
        self._last_activity = time.time()
        if self.LOG_FILE:
            with open(self.LOG_FILE, "ab") as fp:
                fp.write(data)
        while data:
            chunk = data[: self.STDOUT_CHUNK_SIZE]
            print(chunk.decode(self._std_encoding, "replace"), end="", flush=True)
            data = data[self.STDOUT_CHUNK_SIZE :]

    def stderr_data_received(self, data):
        self._last_activity = time.time()
        if self.LOG_FILE:
            with open(self.LOG_FILE, "ab") as fp:
                fp.write(data)
        print(
            data.decode(self._std_encoding, "replace"),
            end="",
            file=sys.stderr,
            flush=True,
        )

    def process_exited(self):
        self._is_running = False
        self._last_activity = time.time()
        # Allow terminating via SIGINT/CTRL+C
        signal.signal(signal.SIGINT, signal.default_int_handler)
        if self._stdin_read_task:
            self._stdin_read_task.cancel()
            self._stdin_read_task = None
        if self._exit_future:
            self._exit_future.set_result(True)
            self._exit_future = None

    def terminate(self):
        if not self.is_running() or not self.transport:
            return
        try:
            self.transport.kill()
            self.transport.close()
        except:  # pylint: disable=bare-except
            pass
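A side note, not from this diff: DebugBaseProcess wires an asyncio subprocess transport to the *_data_received() callbacks. A minimal sketch of how a subclass might be launched; the executable name is hypothetical and the subclass adds nothing of its own:

import asyncio

class VersionProbe(DebugBaseProcess):
    pass  # inherits the stdout/stderr forwarding from DebugBaseProcess

async def main():
    probe = VersionProbe()
    # wait_until_exit=True makes spawn() resolve only after the child exits
    await probe.spawn("gdb", "--version", wait_until_exit=True)

asyncio.run(main())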

101
platformio/debug/process/client.py
Normal file
@@ -0,0 +1,101 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import hashlib
import os
import signal
import tempfile

from platformio import fs, proc
from platformio.cache import ContentCache
from platformio.compat import IS_WINDOWS, hashlib_encode_data
from platformio.debug.process.base import DebugBaseProcess
from platformio.debug.process.server import DebugServerProcess
from platformio.project.helpers import get_project_cache_dir


class DebugClientProcess(DebugBaseProcess):
    def __init__(self, project_dir, debug_config):
        super(DebugClientProcess, self).__init__()
        self.project_dir = project_dir
        self.debug_config = debug_config

        self._server_process = None
        self._session_id = None

        if not os.path.isdir(get_project_cache_dir()):
            os.makedirs(get_project_cache_dir())
        self.working_dir = tempfile.mkdtemp(
            dir=get_project_cache_dir(), prefix=".piodebug-"
        )

        self._target_is_running = False
        self._errors_buffer = b""

    async def run(self):
        session_hash = (
            self.debug_config.client_executable_path + self.debug_config.program_path
        )
        self._session_id = hashlib.sha1(hashlib_encode_data(session_hash)).hexdigest()
        self._kill_previous_session()

        if self.debug_config.server:
            self._server_process = DebugServerProcess(self.debug_config)
            self.debug_config.port = await self._server_process.run()

    def connection_made(self, transport):
        super(DebugClientProcess, self).connection_made(transport)
        self._lock_session(transport.get_pid())
        # Disable SIGINT and allow GDB's Ctrl+C interrupt
        signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
        self.connect_stdin_pipe()

    def process_exited(self):
        if self._server_process:
            self._server_process.terminate()
        super(DebugClientProcess, self).process_exited()

    def _kill_previous_session(self):
        assert self._session_id
        pid = None
        with ContentCache() as cc:
            pid = cc.get(self._session_id)
            cc.delete(self._session_id)
        if not pid:
            return
        if IS_WINDOWS:
            kill = ["Taskkill", "/PID", pid, "/F"]
        else:
            kill = ["kill", pid]
        try:
            proc.exec_command(kill)
        except:  # pylint: disable=bare-except
            pass

    def _lock_session(self, pid):
        if not self._session_id:
            return
        with ContentCache() as cc:
            cc.set(self._session_id, str(pid), "1h")

    def _unlock_session(self):
        if not self._session_id:
            return
        with ContentCache() as cc:
            cc.delete(self._session_id)

    def __del__(self):
        self._unlock_session()
        if self.working_dir and os.path.isdir(self.working_dir):
            fs.rmtree(self.working_dir)
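A side note, not from this diff: the client derives a stable session id from the client executable plus the program path, so a freshly started debug session can kill a stale one that still holds the same target. A sketch of that id, with hypothetical paths:

import hashlib

client_path = "/usr/bin/arm-none-eabi-gdb"   # hypothetical
program_path = "/tmp/project/firmware.elf"   # hypothetical
session_id = hashlib.sha1((client_path + program_path).encode()).hexdigest()
# DebugClientProcess stores its PID under this id in ContentCache for one hour
print(session_id)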

193
platformio/debug/process/gdb.py
Normal file
@@ -0,0 +1,193 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re
import signal
import time

from platformio import telemetry
from platformio.compat import aio_get_running_loop, is_bytes
from platformio.debug import helpers
from platformio.debug.process.client import DebugClientProcess


class GDBClientProcess(DebugClientProcess):

    PIO_SRC_NAME = ".pioinit"
    INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"

    def __init__(self, *args, **kwargs):
        super(GDBClientProcess, self).__init__(*args, **kwargs)
        self._target_is_running = False
        self._errors_buffer = b""

    async def run(self, extra_args):  # pylint: disable=arguments-differ
        await super(GDBClientProcess, self).run()

        self.generate_init_script(os.path.join(self.working_dir, self.PIO_SRC_NAME))
        gdb_path = self.debug_config.client_executable_path or "gdb"
        # start GDB client
        args = [
            gdb_path,
            "-q",
            "--directory",
            self.working_dir,
            "--directory",
            self.project_dir,
            "-l",
            "10",
        ]
        args.extend(list(extra_args or []))
        gdb_data_dir = self._get_data_dir(gdb_path)
        if gdb_data_dir:
            args.extend(["--data-directory", gdb_data_dir])
        args.append(self.debug_config.program_path)

        await self.spawn(*args, cwd=self.project_dir, wait_until_exit=True)

    @staticmethod
    def _get_data_dir(gdb_path):
        if "msp430" in gdb_path:
            return None
        gdb_data_dir = os.path.realpath(
            os.path.join(os.path.dirname(gdb_path), "..", "share", "gdb")
        )
        return gdb_data_dir if os.path.isdir(gdb_data_dir) else None

    def generate_init_script(self, dst):
        # default GDB init commands depending on debug tool
        commands = self.debug_config.get_init_script("gdb").split("\n")

        if self.debug_config.init_cmds:
            commands = self.debug_config.init_cmds
        commands.extend(self.debug_config.extra_cmds)

        if not any("define pio_reset_run_target" in cmd for cmd in commands):
            commands = [
                "define pio_reset_run_target",
                " echo Warning! Undefined pio_reset_run_target command\\n",
                " monitor reset",
                "end",
            ] + commands
        if not any("define pio_reset_halt_target" in cmd for cmd in commands):
            commands = [
                "define pio_reset_halt_target",
                " echo Warning! Undefined pio_reset_halt_target command\\n",
                " monitor reset halt",
                "end",
            ] + commands
        if not any("define pio_restart_target" in cmd for cmd in commands):
            commands += [
                "define pio_restart_target",
                " pio_reset_halt_target",
                " $INIT_BREAK",
                " %s" % ("continue" if self.debug_config.init_break else "next"),
                "end",
            ]

        banner = [
            "echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
            "echo PlatformIO: debug_tool = %s\\n" % self.debug_config.tool_name,
            "echo PlatformIO: Initializing remote target...\\n",
        ]
        footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
        commands = banner + commands + footer

        with open(dst, mode="w", encoding="utf8") as fp:
            fp.write("\n".join(self.debug_config.reveal_patterns(commands)))

    def stdin_data_received(self, data):
        super(GDBClientProcess, self).stdin_data_received(data)
        if b"-exec-run" in data:
            if self._target_is_running:
                token, _ = data.split(b"-", 1)
                self.stdout_data_received(token + b"^running\n")
                return
            if self.debug_config.platform.is_embedded():
                data = data.replace(b"-exec-run", b"-exec-continue")

        if b"-exec-continue" in data:
            self._target_is_running = True
        if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
            # Allow terminating via SIGINT/CTRL+C
            signal.signal(signal.SIGINT, signal.default_int_handler)
            self.transport.get_pipe_transport(0).write(b"pio_reset_run_target\n")
        self.transport.get_pipe_transport(0).write(data)

    def stdout_data_received(self, data):
        super(GDBClientProcess, self).stdout_data_received(data)
        self._handle_error(data)
        # go to init break automatically
        if self.INIT_COMPLETED_BANNER.encode() in data:
            telemetry.send_event(
                "Debug",
                "Started",
                telemetry.dump_run_environment(self.debug_config.env_options),
            )
            self._auto_exec_continue()

    def console_log(self, msg):
        if helpers.is_gdbmi_mode():
            msg = helpers.escape_gdbmi_stream("~", msg)
        self.stdout_data_received(msg if is_bytes(msg) else msg.encode())

    def _auto_exec_continue(self):
        auto_exec_delay = 0.5  # in seconds
        if self._last_activity > (time.time() - auto_exec_delay):
            aio_get_running_loop().call_later(0.1, self._auto_exec_continue)
            return

        if not self.debug_config.init_break or self._target_is_running:
            return

        self.console_log(
            "PlatformIO: Resume the execution to `debug_init_break = %s`\n"
            % self.debug_config.init_break
        )
        self.console_log(
            "PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
        )
        if self.debug_config.platform.is_embedded():
            self.transport.get_pipe_transport(0).write(
                b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
            )
        else:
            self.transport.get_pipe_transport(0).write(
                b"0-exec-run\n" if helpers.is_gdbmi_mode() else b"run\n"
            )
        self._target_is_running = True

    def stderr_data_received(self, data):
        super(GDBClientProcess, self).stderr_data_received(data)
        self._handle_error(data)

    def _handle_error(self, data):
        self._errors_buffer = (self._errors_buffer + data)[-8192:]  # keep last 8 KBytes
        if not (
            self.PIO_SRC_NAME.encode() in self._errors_buffer
            and b"Error in sourced" in self._errors_buffer
        ):
            return

        last_erros = self._errors_buffer.decode()
        last_erros = " ".join(reversed(last_erros.split("\n")))
        last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)

        err = "%s -> %s" % (
            telemetry.dump_run_environment(self.debug_config.env_options),
            last_erros,
        )
        telemetry.send_exception("DebugInitError: %s" % err)
        self.transport.close()
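A side note, not from this diff: generate_init_script() prepends fallback "define" blocks only when the tool script or the user's own commands do not already provide them. The same guard in isolation, on a toy command list:

# Toy illustration of the "define only if missing" guard used above.
commands = ["target extended-remote :3333"]
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
    commands = [
        "define pio_reset_halt_target",
        " monitor reset halt",
        "end",
    ] + commands
print("\n".join(commands))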

148
platformio/debug/process/server.py
Normal file
@@ -0,0 +1,148 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import asyncio
import os
import re
import time

from platformio import fs
from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.debug.exception import DebugInvalidOptionsError
from platformio.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.debug.process.base import DebugBaseProcess
from platformio.proc import where_is_program


class DebugServerProcess(DebugBaseProcess):

    STD_BUFFER_SIZE = 1024

    def __init__(self, debug_config):
        super(DebugServerProcess, self).__init__()
        self.debug_config = debug_config
        self._ready = False
        self._std_buffer = {"out": b"", "err": b""}

    async def run(self):  # pylint: disable=too-many-branches
        server = self.debug_config.server
        if not server:
            return None
        server_executable = server["executable"]
        if not server_executable:
            return None
        if server["cwd"]:
            server_executable = os.path.join(server["cwd"], server_executable)
        if (
            IS_WINDOWS
            and not server_executable.endswith(".exe")
            and os.path.isfile(server_executable + ".exe")
        ):
            server_executable = server_executable + ".exe"

        if not os.path.isfile(server_executable):
            server_executable = where_is_program(server_executable)
        if not os.path.isfile(server_executable):
            raise DebugInvalidOptionsError(
                "Could not launch Debug Server '%s'. Please check that it "
                "is installed and is included in a system PATH\n"
                "See https://docs.platformio.org/page/plus/debugging.html"
                % server_executable
            )

        openocd_pipe_allowed = all(
            [
                not self.debug_config.env_options.get("debug_port"),
                "gdb" in self.debug_config.client_executable_path,
                "openocd" in server_executable,
            ]
        )
        if openocd_pipe_allowed:
            args = []
            if server["cwd"]:
                args.extend(["-s", server["cwd"]])
            args.extend(
                ["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
            )
            args.extend(server["arguments"])
            str_args = " ".join(
                [arg if arg.startswith("-") else '"%s"' % arg for arg in args]
            )
            return fs.to_unix_path('| "%s" %s' % (server_executable, str_args))

        env = os.environ.copy()
        # prepend server "lib" folder to LD path
        if (
            not IS_WINDOWS
            and server["cwd"]
            and os.path.isdir(os.path.join(server["cwd"], "lib"))
        ):
            ld_key = "DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH"
            env[ld_key] = os.path.join(server["cwd"], "lib")
            if os.environ.get(ld_key):
                env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
        # prepend BIN to PATH
        if server["cwd"] and os.path.isdir(os.path.join(server["cwd"], "bin")):
            env["PATH"] = "%s%s%s" % (
                os.path.join(server["cwd"], "bin"),
                os.pathsep,
                os.environ.get("PATH", os.environ.get("Path", "")),
            )

        await self.spawn(
            *([server_executable] + server["arguments"]), cwd=server["cwd"], env=env
        )
        await self._wait_until_ready()

        return self.debug_config.port

    async def _wait_until_ready(self):
        ready_pattern = self.debug_config.server_ready_pattern
        timeout = 60 if ready_pattern else 10
        elapsed = 0
        delay = 0.5
        auto_ready_delay = 0.5
        while not self._ready and self.is_running() and elapsed < timeout:
            await asyncio.sleep(delay)
            if not ready_pattern:
                self._ready = self._last_activity < (time.time() - auto_ready_delay)
            elapsed += delay

    def _check_ready_by_pattern(self, data):
        if self._ready:
            return self._ready
        ready_pattern = self.debug_config.server_ready_pattern
        if ready_pattern:
            if ready_pattern.startswith("^"):
                self._ready = re.match(
                    ready_pattern,
                    data.decode("utf-8", "ignore"),
                )
            else:
                self._ready = ready_pattern.encode() in data
        return self._ready

    def stdout_data_received(self, data):
        super(DebugServerProcess, self).stdout_data_received(
            escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
        )
        self._std_buffer["out"] += data
        self._check_ready_by_pattern(self._std_buffer["out"])
        self._std_buffer["out"] = self._std_buffer["out"][-1 * self.STD_BUFFER_SIZE :]

    def stderr_data_received(self, data):
        super(DebugServerProcess, self).stderr_data_received(data)
        self._std_buffer["err"] += data
        self._check_ready_by_pattern(self._std_buffer["err"])
        self._std_buffer["err"] = self._std_buffer["err"][-1 * self.STD_BUFFER_SIZE :]
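A side note, not from this diff: when the client is GDB, the server is OpenOCD and no debug_port is configured, run() does not spawn the server itself; it returns a pipe command string that GDB launches through "target extended-remote | ...". A sketch of the string it builds, with hypothetical server arguments:

# Hypothetical OpenOCD location and arguments; mirrors the quoting rule above.
server_executable = "/opt/openocd/bin/openocd"
args = [
    "-s", "/opt/openocd",
    "-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled",
    "-f", "board/st_nucleo_f4.cfg",
]
str_args = " ".join(arg if arg.startswith("-") else '"%s"' % arg for arg in args)
print('| "%s" %s' % (server_executable, str_args))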

@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import glob
 import hashlib
 import io
 import json
@@ -24,7 +25,7 @@ import sys
 import click

 from platformio import exception
-from platformio.compat import WINDOWS, glob_escape, glob_recursive
+from platformio.compat import IS_WINDOWS


 class cd(object):
@@ -51,7 +52,7 @@ def get_source_dir():

 def load_json(file_path):
     try:
-        with open(file_path, "r") as f:
+        with open(file_path, mode="r", encoding="utf8") as f:
             return json.load(f)
     except ValueError:
         raise exception.InvalidJSONFile(file_path)
@@ -101,7 +102,7 @@ def ensure_udev_rules():

 def _rules_to_set(rules_path):
     result = set()
-    with open(rules_path) as fp:
+    with open(rules_path, encoding="utf8") as fp:
         for line in fp.readlines():
             line = line.strip()
             if not line or line.startswith("#"):
@@ -158,7 +159,9 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
     src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
     for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
         items = set()
-        for item in glob_recursive(os.path.join(glob_escape(src_dir), pattern)):
+        for item in glob.glob(
+            os.path.join(glob.escape(src_dir), pattern), recursive=True
+        ):
             if os.path.isdir(item):
                 for root, _, files in os.walk(item, followlinks=followlinks):
                     for f in files:
@@ -173,7 +176,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):


 def to_unix_path(path):
-    if not WINDOWS or not path:
+    if not IS_WINDOWS or not path:
         return path
     return re.sub(r"[\\]+", "/", path)

@@ -182,7 +185,7 @@ def expanduser(path):
     """
     Be compatible with Python 3.8, on Windows skip HOME and check for USERPROFILE
     """
-    if not WINDOWS or not path.startswith("~") or "USERPROFILE" not in os.environ:
+    if not IS_WINDOWS or not path.startswith("~") or "USERPROFILE" not in os.environ:
         return os.path.expanduser(path)
     return os.environ["USERPROFILE"] + path[1:]

@@ -12,8 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from os import getenv
-from os.path import join
+import os
+import shutil
 from time import time

 import click
@@ -28,7 +28,6 @@ from platformio.commands.lib.command import lib_update as cmd_lib_update
 from platformio.commands.platform import platform_update as cmd_platform_update
 from platformio.commands.system.prune import calculate_unnecessary_system_data
 from platformio.commands.upgrade import get_latest_version
-from platformio.compat import ensure_python3
 from platformio.package.manager.core import update_core_packages
 from platformio.package.manager.library import LibraryPackageManager
 from platformio.package.manager.platform import PlatformPackageManager
@@ -40,8 +39,6 @@ from platformio.proc import is_container


 def on_platformio_start(ctx, force, caller):
-    ensure_python3(raise_exception=True)
-
     app.set_session_var("command_ctx", ctx)
     app.set_session_var("force_option", force)
     set_caller(caller)
@@ -78,19 +75,19 @@ def on_platformio_exception(e):


 def set_caller(caller=None):
-    caller = caller or getenv("PLATFORMIO_CALLER")
+    caller = caller or os.getenv("PLATFORMIO_CALLER")
     if caller:
         return app.set_session_var("caller_id", caller)
-    if getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"):
+    if os.getenv("VSCODE_PID") or os.getenv("VSCODE_NLS_CONFIG"):
         caller = "vscode"
-    elif getenv("GITPOD_INSTANCE_ID") or getenv("GITPOD_WORKSPACE_URL"):
+    elif os.getenv("GITPOD_INSTANCE_ID") or os.getenv("GITPOD_WORKSPACE_URL"):
         caller = "gitpod"
     elif is_container():
-        if getenv("C9_UID"):
+        if os.getenv("C9_UID"):
             caller = "C9"
-        elif getenv("USER") == "cabox":
+        elif os.getenv("USER") == "cabox":
             caller = "CA"
-        elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
+        elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
             caller = "Che"
     return app.set_session_var("caller_id", caller)

@@ -142,7 +139,7 @@ class Upgrader(object):


 def after_upgrade(ctx):
-    terminal_width, _ = click.get_terminal_size()
+    terminal_width, _ = shutil.get_terminal_size()
     last_version = app.get_state_item("last_version", "0.0.0")
     if last_version == __version__:
         return
@@ -207,7 +204,7 @@ def after_upgrade(ctx):
             click.style("https://github.com/platformio/platformio", fg="cyan"),
         )
     )
-    if not getenv("PLATFORMIO_IDE"):
+    if not os.getenv("PLATFORMIO_IDE"):
         click.echo(
             "- %s PlatformIO IDE for embedded development > %s"
             % (
@@ -238,7 +235,7 @@ def check_platformio_upgrade():
     if pepver_to_semver(latest_version) <= pepver_to_semver(__version__):
         return

-    terminal_width, _ = click.get_terminal_size()
+    terminal_width, _ = shutil.get_terminal_size()

     click.echo("")
     click.echo("*" * terminal_width)
@@ -248,10 +245,10 @@ def check_platformio_upgrade():
         fg="yellow",
         nl=False,
     )
-    if getenv("PLATFORMIO_IDE"):
+    if os.getenv("PLATFORMIO_IDE"):
         click.secho("PlatformIO IDE Menu: Upgrade PlatformIO", fg="cyan", nl=False)
         click.secho("`.", fg="yellow")
-    elif join("Cellar", "platformio") in fs.get_source_dir():
+    elif os.path.join("Cellar", "platformio") in fs.get_source_dir():
         click.secho("brew update && brew upgrade", fg="cyan", nl=False)
         click.secho("` command.", fg="yellow")
     else:
@@ -291,7 +288,7 @@ def check_internal_updates(ctx, what):  # pylint: disable=too-many-branches
     if not outdated_items:
         return

-    terminal_width, _ = click.get_terminal_size()
+    terminal_width, _ = shutil.get_terminal_size()

     click.echo("")
     click.echo("*" * terminal_width)
@@ -353,7 +350,7 @@ def check_prune_system():
     if (unnecessary_size / 1024) < threshold_mb:
         return

-    terminal_width, _ = click.get_terminal_size()
+    terminal_width, _ = shutil.get_terminal_size()
     click.echo()
     click.echo("*" * terminal_width)
     click.secho(

@@ -73,7 +73,7 @@ class FileDownloader(object):
     def start(self, with_progress=True, silent=False):
         label = "Downloading"
         itercontent = self._request.iter_content(chunk_size=io.DEFAULT_BUFFER_SIZE)
-        fp = open(self._destination, "wb")
+        fp = open(self._destination, "wb")  # pylint: disable=consider-using-with
         try:
             if not with_progress or self.get_size() == -1:
                 if not silent:

@@ -62,7 +62,9 @@ class LockFile(object):
             else:
                 raise LockFileExists

-        self._fp = open(self._lock_path, "w")
+        self._fp = open(  # pylint: disable=consider-using-with
+            self._lock_path, mode="w", encoding="utf8"
+        )
         try:
             if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
                 fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)

@@ -42,7 +42,7 @@ class RegistryFileMirrorIterator(object):
         return self

     def next(self):
-        """ For Python 2 compatibility """
+        """For Python 2 compatibility"""
         return self.__next__()

     def __next__(self):

@@ -20,7 +20,6 @@ import sys
 from datetime import date

 from platformio import __core_packages__, exception, fs, util
-from platformio.compat import PY2
 from platformio.package.exception import UnknownPackageError
 from platformio.package.manager.tool import ToolPackageManager
 from platformio.package.meta import PackageItem, PackageSpec
@@ -79,6 +78,7 @@ def remove_unnecessary_core_packages(dry_run=False):
         pkg = pm.get_package(spec)
         if not pkg:
             continue
+        # pylint: disable=no-member
         best_pkg_versions[pkg.metadata.name] = pkg.metadata.version

     for pkg in pm.get_installed():
@@ -156,7 +156,9 @@ def build_contrib_pysite_package(target_dir, with_metadata=True):
         subprocess.check_call(args + [dep])

     # build manifests
-    with open(os.path.join(target_dir, "package.json"), "w") as fp:
+    with open(
+        os.path.join(target_dir, "package.json"), mode="w", encoding="utf8"
+    ) as fp:
         json.dump(
             dict(
                 name="contrib-pysite",
@@ -207,7 +209,7 @@ def get_contrib_pysite_deps():
     sys_type = util.get_systype()
     py_version = "%d%d" % (sys.version_info.major, sys.version_info.minor)

-    twisted_version = "19.10.0" if PY2 else "20.3.0"
+    twisted_version = "20.3.0"
     result = [
         "twisted == %s" % twisted_version,
     ]

@@ -44,7 +44,9 @@ class LibraryPackageManager(BasePackageManager):  # pylint: disable=too-many-anc
         root_dir = self.find_library_root(path)

         # automatically generate library manifest
-        with open(os.path.join(root_dir, "library.json"), "w") as fp:
+        with open(
+            os.path.join(root_dir, "library.json"), mode="w", encoding="utf8"
+        ) as fp:
             json.dump(
                 dict(
                     name=spec.name,

@@ -253,7 +253,7 @@ class ManifestSchema(BaseSchema):
     @staticmethod
     @memoized(expire="1h")
     def load_spdx_licenses():
-        version = "3.12"
+        version = "3.14"
         spdx_data_url = (
             "https://raw.githubusercontent.com/spdx/license-list-data/"
             "v%s/json/licenses.json" % version

@@ -382,12 +382,12 @@ class PackageMetaData(object):
         )

     def dump(self, path):
-        with open(path, "w") as fp:
+        with open(path, mode="w", encoding="utf8") as fp:
             return json.dump(self.as_dict(), fp)

     @staticmethod
     def load(path):
-        with open(path) as fp:
+        with open(path, encoding="utf8") as fp:
             data = json.load(fp)
         if data["spec"]:
             data["spec"] = PackageSpec(**data["spec"])

@@ -20,7 +20,7 @@ import tarfile
 import tempfile

 from platformio import fs
-from platformio.compat import WINDOWS, ensure_python3
+from platformio.compat import IS_WINDOWS
 from platformio.package.exception import PackageException, UserSideException
 from platformio.package.manifest.parser import ManifestFileType, ManifestParserFactory
 from platformio.package.manifest.schema import ManifestSchema
@@ -94,7 +94,6 @@ class PackagePacker(object):
     ]

     def __init__(self, package, manifest_uri=None):
-        assert ensure_python3()
         self.package = package
         self.manifest_uri = manifest_uri

@@ -117,7 +116,7 @@ class PackagePacker(object):

         # if zip/tar.gz -> unpack to tmp dir
         if not os.path.isdir(src):
-            if WINDOWS:
+            if IS_WINDOWS:
                 raise UserSideException(
                     "Packaging from an archive does not work on Windows OS. Please "
                     "extract data from `%s` manually and pack a folder instead"
@@ -182,7 +181,9 @@ class PackagePacker(object):
             and os.path.isdir(os.path.join(src, include[0]))
         ):
             src = os.path.join(src, include[0])
-        with open(os.path.join(src, "library.json"), "w") as fp:
+        with open(
+            os.path.join(src, "library.json"), mode="w", encoding="utf8"
+        ) as fp:
             manifest_updated = manifest.copy()
             del manifest_updated["export"]["include"]
             json.dump(manifest_updated, fp, indent=2, ensure_ascii=False)

@@ -57,7 +57,9 @@ class BaseArchiver(object):

 class TARArchiver(BaseArchiver):
     def __init__(self, archpath):
-        super(TARArchiver, self).__init__(tarfile_open(archpath))
+        super(TARArchiver, self).__init__(
+            tarfile_open(archpath)  # pylint: disable=consider-using-with
+        )

     def get_items(self):
         return self._afo.getmembers()
@@ -99,7 +101,9 @@ class TARArchiver(BaseArchiver):

 class ZIPArchiver(BaseArchiver):
     def __init__(self, archpath):
-        super(ZIPArchiver, self).__init__(ZipFile(archpath))
+        super(ZIPArchiver, self).__init__(
+            ZipFile(archpath)  # pylint: disable=consider-using-with
+        )

     @staticmethod
     def preserve_permissions(item, dest_dir):

@@ -150,7 +150,7 @@ class GitClient(VCSClientBase):
             if path:
                 proc.append_env_path("PATH", path)
             return True
-        except subprocess.CalledProcessError:
+        except (subprocess.CalledProcessError, FileNotFoundError):
             pass
         return False

@@ -20,7 +20,7 @@ import sys
 import click

 from platformio import app, fs, proc, telemetry
-from platformio.compat import PY2, hashlib_encode_data, is_bytes
+from platformio.compat import hashlib_encode_data, is_bytes
 from platformio.package.manager.core import get_core_package_dir
 from platformio.platform.exception import BuildScriptNotFound

@@ -90,14 +90,9 @@ class PlatformRunMixin(object):

     def _run_scons(self, variables, targets, jobs):
         scons_dir = get_core_package_dir("tool-scons")
-        script_path = (
-            os.path.join(scons_dir, "script", "scons")
-            if PY2
-            else os.path.join(scons_dir, "scons.py")
-        )
         args = [
             proc.get_pythonexe_path(),
-            script_path,
+            os.path.join(scons_dir, "scons.py"),
             "-Q",
             "--warn=no-no-parallel-support",
             "--jobs",

@@ -203,7 +203,7 @@ class PlatformBase(  # pylint: disable=too-many-instance-attributes,too-many-pub
         elif "nobuild" in targets and opts.get("type") != "framework":
             self.packages[name]["optional"] = True

-    def configure_debug_options(self, initial_debug_options, ide_data):
+    def configure_debug_session(self, debug_config):
         raise NotImplementedError

     def get_lib_storages(self):

@@ -15,11 +15,7 @@
 import os

 from platformio import fs, telemetry, util
-from platformio.commands.debug.exception import (
-    DebugInvalidOptionsError,
-    DebugSupportError,
-)
-from platformio.compat import PY2
+from platformio.debug.exception import DebugInvalidOptionsError, DebugSupportError
 from platformio.exception import UserSideException
 from platformio.platform.exception import InvalidBoardManifest

@@ -43,15 +39,6 @@ class PlatformBoardConfig(object):
             value = self._manifest
             for k in path.split("."):
                 value = value[k]
-            # pylint: disable=undefined-variable
-            if PY2 and isinstance(value, unicode):
-                # cast to plain string from unicode for PY2, resolves issue in
-                # dev/platform when BoardConfig.get() is used in pair with
-                # os.path.join(file_encoding, unicode_encoding)
-                try:
-                    value = value.encode("utf-8")
-                except UnicodeEncodeError:
-                    pass
             return value
         except KeyError:
             if default is not None:
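A side note, not from this diff: the src_filter hunk earlier in this section drops the PY2-era glob_recursive/glob_escape compat helpers in favor of the standard library. The replacement call behaves like this sketch, with hypothetical inputs:

import glob
import os

# glob.escape() protects literal [ ] ? * characters in the base directory,
# and recursive=True lets "**" patterns from src_filter descend into subfolders.
src_dir = "/tmp/project/src"
pattern = "**/*.cpp"
for item in glob.glob(os.path.join(glob.escape(src_dir), pattern), recursive=True):
    print(item)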
Some files were not shown because too many files have changed in this diff.