Mirror of https://github.com/platformio/platformio-core.git (synced 2025-06-25 09:31:53 +02:00)
Compare commits: 78 commits, v5.0.0...cpp17-qtcr
Commits in this comparison (SHA1):
7a8c061d79
8d4cde4534
7292024ee6
d6df6cbb5d
344e94d8a1
5cf73a9165
96b1a1c79c
0bbe7f8c73
e333bb1cca
454cd8d784
743a43ae17
5a1b0e19b2
da6cde5cbd
5ea864da39
175448deda
16f90dd821
9efac669e6
adf9ba29df
cacddb9abb
edbe213410
891f78be37
175be346a8
9ae981614f
16f5f3ef46
2cd19b0273
e158e54a26
63a6fe9133
779eaee310
0ecfe8105f
b8cc867ba4
7230556d1b
afd79f4655
5d87fb8757
23e9596506
428f46fafe
ee847e03a6
a870981266
411bf1107d
5b74c8a942
a24bab0a27
1cb7764b0e
d835f52a18
9c20ab81cb
14de3e79c5
21c12030d5
2370e16f1b
a384411a28
1e0ca8f79c
2b5e590819
bf57b777bf
f656d19ed5
eb09af06ed
687c339f20
7bc170a53e
65297c24d4
ea21f3fba0
b515a004d3
7d3fc1ec1a
6987d6c1c6
de2b5ea905
f946a0bc08
4f47ca5742
54b51fc2fd
1f284e853d
2a30ad0fdf
c454ae336d
cd59c829e0
429f416b38
0a881d582d
65b1029216
c7758fd30e
46f300d62f
4234dfb6f9
9695720343
1f28056459
7dacceef04
39883e8d68
949ef2c48a
.github/workflows/examples.yml (6 lines changed, vendored)
@@ -26,7 +26,7 @@ jobs:
      - name: Run on Linux
        if: startsWith(matrix.os, 'ubuntu')
        env:
-         PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,intel_mcs51,aceinna_imu"
+         PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,intel_mcs51,aceinna_imu"
        run: |
          # ChipKIT issue: install 32-bit support for GCC PIC32
          sudo apt-get install libc6-i386
@@ -40,7 +40,7 @@ jobs:
      - name: Run on macOS
        if: startsWith(matrix.os, 'macos')
        env:
-         PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,microchippic32,gd32v,nuclei"
+         PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,microchippic32,gd32v,nuclei,lattice_ice40"
        run: |
          df -h
          tox -e testexamples
@@ -50,7 +50,7 @@ jobs:
        env:
          PLATFORMIO_CORE_DIR: C:/pio
          PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
-         PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,riscv_gap"
+         PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,riscv_gap"
        run: |
          tox -e testexamples
HISTORY.rst (28 lines changed)
@@ -8,11 +8,37 @@ PlatformIO Core 5

**A professional collaborative platform for embedded development**

- `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/core/migration.html>`__

+ 5.0.2 (2020-10-30)
+ ~~~~~~~~~~~~~~~~~~
+
+ - Initialize a new project, or update an existing one, by passing a working environment name and its options (`issue #3686 <https://github.com/platformio/platformio-core/issues/3686>`_)
+ - Automatically build PlatformIO Core extra Python dependencies on the host machine if they are missing from the registry (`issue #3700 <https://github.com/platformio/platformio-core/issues/3700>`_)
+ - Improved "core.call" RPC for PlatformIO Home (`issue #3671 <https://github.com/platformio/platformio-core/issues/3671>`_)
+ - Fixed a "PermissionError: [WinError 5]" on Windows when an external repository is used with the `lib_deps <https://docs.platformio.org/page/projectconf/section_env_library.html#lib-deps>`__ option (`issue #3664 <https://github.com/platformio/platformio-core/issues/3664>`_)
+ - Fixed a "KeyError: 'versions'" when a dependency does not exist in the registry (`issue #3666 <https://github.com/platformio/platformio-core/issues/3666>`_)
+ - Fixed an issue with the GCC linker when the "native" dev-platform is used together with library dependencies (`issue #3669 <https://github.com/platformio/platformio-core/issues/3669>`_)
+ - Fixed an "AssertionError: ensure_dir_exists" when checking library updates from simultaneous subprocesses (`issue #3677 <https://github.com/platformio/platformio-core/issues/3677>`_)
+ - Fixed an issue where the `pio package publish <https://docs.platformio.org/page/core/userguide/package/cmd_publish.html>`__ command removed the original archive after submitting it to the registry (`issue #3716 <https://github.com/platformio/platformio-core/issues/3716>`_)
+ - Fixed an issue where running `pio lib install <https://docs.platformio.org/page/core/userguide/lib/cmd_install.html>`__ multiple times with the same local library produced duplicates in ``lib_deps`` (`issue #3715 <https://github.com/platformio/platformio-core/issues/3715>`_)
+ - Fixed an issue with a wrong timestamp in device monitor output when the `"time" filter <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#filters>`__ is used (`issue #3712 <https://github.com/platformio/platformio-core/issues/3712>`_)
+
5.0.1 (2020-09-10)
~~~~~~~~~~~~~~~~~~

- Added support for the "owner" requirement when declaring ``dependencies`` using `library.json <https://docs.platformio.org/page/librarymanager/config.html#dependencies>`__
- Fixed an issue when using a custom git/ssh package with the `platform_packages <https://docs.platformio.org/page/projectconf/section_env_platform.html#platform-packages>`__ option (`issue #3624 <https://github.com/platformio/platformio-core/issues/3624>`_)
- Fixed an "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using `Remote Development <https://docs.platformio.org/page/plus/pio-remote.html>`__ on a Raspberry Pi device (`issue #3652 <https://github.com/platformio/platformio-core/issues/3652>`_)
- Fixed an issue where the `pio package unpublish <https://docs.platformio.org/page/core/userguide/package/cmd_unpublish.html>`__ command crashed (`issue #3660 <https://github.com/platformio/platformio-core/issues/3660>`_)
- Fixed an issue where the package manager tried to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
- Fixed an incorrect value for the C++ language standard in IDE projects when an in-progress language standard is used (`issue #3653 <https://github.com/platformio/platformio-core/issues/3653>`_)
- Fixed an "Invalid simple block (semantic_version)" from a library dependency that refers to an external source (repository, ZIP/Tar archive) (`issue #3658 <https://github.com/platformio/platformio-core/issues/3658>`_)
- Fixed an issue where an external dev-platform could not be removed or updated using PlatformIO Home (`issue #3663 <https://github.com/platformio/platformio-core/issues/3663>`_)

5.0.0 (2020-09-03)
~~~~~~~~~~~~~~~~~~

Please check the `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/core/migration.html>`__.

* Integration with the new **PlatformIO Trusted Registry**

  - Enterprise-grade package storage with high availability (multi replicas)
Makefile (3 lines changed)
@@ -31,5 +31,8 @@ profile:
	python -m cProfile -o .tox/.tmp/cprofile.prof -m platformio ${PIOARGS}
	snakeviz .tox/.tmp/cprofile.prof

+ pack:
+ 	python setup.py sdist
+
publish:
	python setup.py sdist upload
README.rst (30 lines changed)
@@ -16,23 +16,21 @@ PlatformIO
.. image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg
    :target: https://pypi.python.org/pypi/platformio/
    :alt: License
- .. image:: https://img.shields.io/badge/PlatformIO-Community-orange.svg
-     :alt: Community Forums
-     :target: https://community.platformio.org?utm_source=github&utm_medium=core
+ .. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
+     :alt: Community Labs
+     :target: https://piolabs.com/?utm_source=github&utm_medium=core

**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
- `Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
+ `Contact Us <https://piolabs.com/?utm_source=github&utm_medium=core>`_

- **Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
- `LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
+ **Social:** `LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
+ `Twitter <https://twitter.com/PlatformIO_Org>`_ |
`Facebook <https://www.facebook.com/platformio>`_ |
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
`Bintray <https://bintray.com/platformio>`_ |
- `Community <https://community.platformio.org?utm_source=github&utm_medium=core>`_
+ `Community Forums <https://community.platformio.org?utm_source=github&utm_medium=core>`_

.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
    :target: https://platformio.org?utm_source=github&utm_medium=core
@@ -51,20 +49,18 @@ Get Started
-----------

* `What is PlatformIO? <https://docs.platformio.org/page/what-is-platformio.html?utm_source=github&utm_medium=core>`_

Instruments
-----------

* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/page/core.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__

Solutions
---------

* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_

- Professional
- ------------
+ **Advanced**

* `Debugging <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
docs (submodule, 2 lines changed): updated 03a83c996f...deae09a880
@@ -14,7 +14,7 @@

import sys

- VERSION = (5, 0, 0)
+ VERSION = (5, 0, 2)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
@@ -47,7 +47,7 @@ __pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None)  # (connect, read)

__core_packages__ = {
- "contrib-piohome": "~3.3.0",
+ "contrib-piohome": "~3.3.1",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-unity": "~1.20500.0",
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
@@ -57,8 +57,7 @@ __core_packages__ = {
}

__check_internet_hosts__ = [
- "140.82.118.3",  # Github.com
- "35.231.145.151",  # Gitlab.com
+ "185.199.110.153",  # Github.com
+ "88.198.170.159",  # platformio.org
"github.com",
"platformio.org",
@@ -78,6 +78,7 @@ DEFAULT_ENV_OPTIONS = dict(
PROGNAME="program",
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PYTHONEXE=get_pythonexe_path(),
+ IDE_EXTRA_DATA={},
)

if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
@@ -93,7 +93,9 @@ def _dump_defines(env):
defines = []
# global symbols
for item in processDefines(env.get("CPPDEFINES", [])):
- defines.append(env.subst(item).replace("\\", ""))
+ item = item.strip()
+ if item:
+ defines.append(env.subst(item).replace("\\", ""))

# special symbol for Atmel AVR MCU
if env["PIOPLATFORM"] == "atmelavr":
@@ -164,14 +166,17 @@ def DumpIDEData(env, globalenv):
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path": env.subst("$PROG_PATH"),
- "flash_extra_images": [
- {"offset": item[0], "path": env.subst(item[1])}
- for item in env.get("FLASH_EXTRA_IMAGES", [])
- ],
"svd_path": _get_svd_path(env),
"compiler_type": env.GetCompilerType(),
"targets": globalenv.DumpTargets(),
+ "extra": dict(
+ flash_images=[
+ {"offset": item[0], "path": env.subst(item[1])}
+ for item in env.get("FLASH_EXTRA_IMAGES", [])
+ ]
+ ),
}
+ data["extra"].update(env.get("IDE_EXTRA_DATA", {}))

env_ = env.Clone()
# https://github.com/platformio/platformio-atom-ide/issues/34
@@ -27,7 +27,7 @@ from SCons.Script import Export  # pylint: disable=import-error
from SCons.Script import SConscript  # pylint: disable=import-error

from platformio import __version__, fs
- from platformio.compat import string_types
+ from platformio.compat import MACOS, string_types
from platformio.package.version import pepver_to_semver

SRC_HEADER_EXT = ["h", "hpp"]
@@ -69,7 +69,7 @@ def BuildProgram(env):
if (
env.get("LIBS")
and env.GetCompilerType() == "gcc"
- and env.PioPlatform().is_embedded()
+ and (env.PioPlatform().is_embedded() or not MACOS)
):
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
@@ -80,7 +80,9 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
)

data = self.fetch_json_data(
- "post", "/v1/login", data={"username": username, "password": password},
+ "post",
+ "/v1/login",
+ data={"username": username, "password": password},
)
app.set_state_item("account", data)
return data
@@ -108,7 +110,9 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
self.delete_local_session()
try:
self.fetch_json_data(
- "post", "/v1/logout", data={"refresh_token": refresh_token},
+ "post",
+ "/v1/logout",
+ data={"refresh_token": refresh_token},
)
except AccountError:
pass
@@ -153,15 +157,26 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
).get("auth_token")

def forgot_password(self, username):
- return self.fetch_json_data("post", "/v1/forgot", data={"username": username},)
+ return self.fetch_json_data(
+ "post",
+ "/v1/forgot",
+ data={"username": username},
+ )

def get_profile(self):
- return self.send_auth_request("get", "/v1/profile",)
+ return self.send_auth_request(
+ "get",
+ "/v1/profile",
+ )

def update_profile(self, profile, current_password):
profile["current_password"] = current_password
self.delete_local_state("summary")
- response = self.send_auth_request("put", "/v1/profile", data=profile,)
+ response = self.send_auth_request(
+ "put",
+ "/v1/profile",
+ data=profile,
+ )
return response

def get_account_info(self, offline=False):
@@ -178,7 +193,10 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
"username": account.get("username"),
}
}
- result = self.send_auth_request("get", "/v1/summary",)
+ result = self.send_auth_request(
+ "get",
+ "/v1/summary",
+ )
account["summary"] = dict(
profile=result.get("profile"),
packages=result.get("packages"),
@@ -203,7 +221,10 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
return self.send_auth_request("get", "/v1/orgs/%s" % orgname)

def list_orgs(self):
- return self.send_auth_request("get", "/v1/orgs",)
+ return self.send_auth_request(
+ "get",
+ "/v1/orgs",
+ )

def update_org(self, orgname, data):
return self.send_auth_request(
@@ -211,19 +232,29 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
)

def destroy_org(self, orgname):
- return self.send_auth_request("delete", "/v1/orgs/%s" % orgname,)
+ return self.send_auth_request(
+ "delete",
+ "/v1/orgs/%s" % orgname,
+ )

def add_org_owner(self, orgname, username):
return self.send_auth_request(
- "post", "/v1/orgs/%s/owners" % orgname, data={"username": username},
+ "post",
+ "/v1/orgs/%s/owners" % orgname,
+ data={"username": username},
)

def list_org_owners(self, orgname):
- return self.send_auth_request("get", "/v1/orgs/%s/owners" % orgname,)
+ return self.send_auth_request(
+ "get",
+ "/v1/orgs/%s/owners" % orgname,
+ )

def remove_org_owner(self, orgname, username):
return self.send_auth_request(
- "delete", "/v1/orgs/%s/owners" % orgname, data={"username": username},
+ "delete",
+ "/v1/orgs/%s/owners" % orgname,
+ data={"username": username},
)

def create_team(self, orgname, teamname, description):
@@ -235,16 +266,21 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods

def destroy_team(self, orgname, teamname):
return self.send_auth_request(
- "delete", "/v1/orgs/%s/teams/%s" % (orgname, teamname),
+ "delete",
+ "/v1/orgs/%s/teams/%s" % (orgname, teamname),
)

def get_team(self, orgname, teamname):
return self.send_auth_request(
- "get", "/v1/orgs/%s/teams/%s" % (orgname, teamname),
+ "get",
+ "/v1/orgs/%s/teams/%s" % (orgname, teamname),
)

def list_teams(self, orgname):
- return self.send_auth_request("get", "/v1/orgs/%s/teams" % orgname,)
+ return self.send_auth_request(
+ "get",
+ "/v1/orgs/%s/teams" % orgname,
+ )

def update_team(self, orgname, teamname, data):
return self.send_auth_request(
@@ -133,9 +133,7 @@ class HTTPClient(object):
def fetch_json_data(self, method, path, **kwargs):
cache_valid = kwargs.pop("cache_valid") if "cache_valid" in kwargs else None
if not cache_valid:
- return self.raise_error_from_response(
- self.send_request(method, path, **kwargs)
- )
+ return self._parse_json_response(self.send_request(method, path, **kwargs))
cache_key = ContentCache.key_from_args(
method, path, kwargs.get("params"), kwargs.get("data")
)
@@ -144,11 +142,12 @@ class HTTPClient(object):
if result is not None:
return json.loads(result)
response = self.send_request(method, path, **kwargs)
+ data = self._parse_json_response(response)
cc.set(cache_key, response.text, cache_valid)
- return self.raise_error_from_response(response)
+ return data

@staticmethod
- def raise_error_from_response(response, expected_codes=(200, 201, 202)):
+ def _parse_json_response(response, expected_codes=(200, 201, 202)):
if response.status_code in expected_codes:
try:
return response.json()
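The reworked `fetch_json_data` above separates JSON parsing from HTTP error handling and, when a `cache_valid` period is given, stores the raw response text so repeated registry lookups do not hit the network. A minimal standalone sketch of the same idea; the tiny cache class and the TTL-in-seconds argument are illustrative simplifications (PlatformIO's `ContentCache` accepts strings such as "1h"):

    import json
    import time

    class TinyCache:
        """Illustrative stand-in for PlatformIO's ContentCache: raw text plus a TTL."""

        def __init__(self):
            self._items = {}

        def get(self, key):
            value = self._items.get(key)
            if not value or value[1] < time.time():
                return None
            return value[0]

        def set(self, key, text, ttl):
            self._items[key] = (text, time.time() + ttl)

    CACHE = TinyCache()

    def fetch_json_data(send_request, method, path, cache_valid=None):
        """Return parsed JSON; reuse the cached body while `cache_valid` seconds last."""
        if not cache_valid:
            return json.loads(send_request(method, path))
        key = "%s %s" % (method, path)
        cached = CACHE.get(key)
        if cached is not None:
            return json.loads(cached)
        raw = send_request(method, path)
        data = json.loads(raw)  # parse before caching so broken payloads are never stored
        CACHE.set(key, raw, cache_valid)
        return data

    calls = []

    def fake_request(method, path):
        calls.append(path)
        return '{"ok": true}'

    fetch_json_data(fake_request, "get", "/v3/search", cache_valid=3600)
    fetch_json_data(fake_request, "get", "/v3/search", cache_valid=3600)
    print(len(calls))  # 1 -> the second lookup was served from the cache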
@@ -70,12 +70,16 @@ class RegistryClient(HTTPClient):
if version:
path += "/" + version
return self.send_auth_request(
- "delete", path, params={"undo": 1 if undo else 0},
+ "delete",
+ path,
+ params={"undo": 1 if undo else 0},
)

def update_resource(self, urn, private):
return self.send_auth_request(
- "put", "/v3/resources/%s" % urn, data={"private": int(private)},
+ "put",
+ "/v3/resources/%s" % urn,
+ data={"private": int(private)},
)

def grant_access_for_resource(self, urn, client, level):
@@ -87,7 +91,9 @@ class RegistryClient(HTTPClient):

def revoke_access_from_resource(self, urn, client):
return self.send_auth_request(
- "delete", "/v3/resources/%s/access" % urn, data={"client": client},
+ "delete",
+ "/v3/resources/%s/access" % urn,
+ data={"client": client},
)

def list_resources(self, owner):
@@ -136,6 +142,6 @@ class RegistryClient(HTTPClient):
cache_valid="1h",
)
except HTTPClientError as e:
- if e.response.status_code == 404:
+ if e.response is not None and e.response.status_code == 404:
return None
raise e
@@ -47,27 +47,31 @@ def validate_urn(value):

@cli.command("public", short_help="Make resource public")
@click.argument(
- "urn", callback=lambda _, __, value: validate_urn(value),
+ "urn",
+ callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_public(urn, urn_type):
client = RegistryClient()
client.update_resource(urn=urn, private=0)
return click.secho(
- "The resource %s has been successfully updated." % urn, fg="green",
+ "The resource %s has been successfully updated." % urn,
+ fg="green",
)


@cli.command("private", short_help="Make resource private")
@click.argument(
- "urn", callback=lambda _, __, value: validate_urn(value),
+ "urn",
+ callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_private(urn, urn_type):
client = RegistryClient()
client.update_resource(urn=urn, private=1)
return click.secho(
- "The resource %s has been successfully updated." % urn, fg="green",
+ "The resource %s has been successfully updated." % urn,
+ fg="green",
)


@@ -79,14 +83,16 @@ def access_private(urn, urn_type):
callback=lambda _, __, value: validate_client(value),
)
@click.argument(
- "urn", callback=lambda _, __, value: validate_urn(value),
+ "urn",
+ callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_grant(level, client, urn, urn_type):
reg_client = RegistryClient()
reg_client.grant_access_for_resource(urn=urn, client=client, level=level)
return click.secho(
- "Access for resource %s has been granted for %s" % (urn, client), fg="green",
+ "Access for resource %s has been granted for %s" % (urn, client),
+ fg="green",
)


@@ -97,14 +103,16 @@ def access_grant(level, client, urn, urn_type):
callback=lambda _, __, value: validate_client(value),
)
@click.argument(
- "urn", callback=lambda _, __, value: validate_urn(value),
+ "urn",
+ callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_revoke(client, urn, urn_type):
reg_client = RegistryClient()
reg_client.revoke_access_from_resource(urn=urn, client=client)
return click.secho(
- "Access for resource %s has been revoked for %s" % (urn, client), fg="green",
+ "Access for resource %s has been revoked for %s" % (urn, client),
+ fg="green",
)
@@ -192,7 +192,10 @@ def account_destroy():
client.logout()
except AccountNotAuthorized:
pass
- return click.secho("User account has been destroyed.", fg="green",)
+ return click.secho(
+ "User account has been destroyed.",
+ fg="green",
+ )


@cli.command("show", short_help="PlatformIO Account information")
@@ -24,7 +24,10 @@ import click
from platformio import app, exception, fs, proc
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
+ from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.package.manager.core import inject_contrib_pysite
+ from platformio.platform.exception import UnknownPlatform
+ from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectEnvsNotAvailableError
from platformio.project.helpers import is_platformio_project, load_project_ide_data
@@ -73,18 +76,29 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
env_options = config.items(env=env_name, as_dict=True)
if not set(env_options.keys()) >= set(["platform", "board"]):
raise ProjectEnvsNotAvailableError()
- debug_options = helpers.validate_debug_options(ctx, env_options)
+
+ try:
+ platform = PlatformFactory.new(env_options["platform"])
+ except UnknownPlatform:
+ ctx.invoke(
+ cmd_platform_install,
+ platforms=[env_options["platform"]],
+ skip_default_package=True,
+ )
+ platform = PlatformFactory.new(env_options["platform"])
+
+ debug_options = helpers.configure_initial_debug_options(platform, env_options)
+ assert debug_options

if not interface:
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)

- configuration = load_project_ide_data(project_dir, env_name)
- if not configuration:
- raise DebugInvalidOptionsError("Could not load debug configuration")
+ ide_data = load_project_ide_data(project_dir, env_name)
+ if not ide_data:
+ raise DebugInvalidOptionsError("Could not load a build configuration")

if "--version" in __unprocessed:
- result = proc.exec_command([configuration["gdb_path"], "--version"])
+ result = proc.exec_command([ide_data["gdb_path"], "--version"])
if result["returncode"] == 0:
return click.echo(result["out"])
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
@@ -99,23 +113,25 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
nl=False,
)

- debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
- debug_options, configuration
- )
+ try:
+ debug_options = platform.configure_debug_options(debug_options, ide_data)
+ except NotImplementedError:
+ # legacy for ESP32 dev-platform <=2.0.0
+ debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
+ debug_options, ide_data
+ )

rebuild_prog = False
preload = debug_options["load_cmds"] == ["preload"]
load_mode = debug_options["load_mode"]
if load_mode == "always":
- rebuild_prog = preload or not helpers.has_debug_symbols(
- configuration["prog_path"]
- )
+ rebuild_prog = preload or not helpers.has_debug_symbols(ide_data["prog_path"])
elif load_mode == "modified":
rebuild_prog = helpers.is_prog_obsolete(
- configuration["prog_path"]
- ) or not helpers.has_debug_symbols(configuration["prog_path"])
+ ide_data["prog_path"]
+ ) or not helpers.has_debug_symbols(ide_data["prog_path"])
else:
- rebuild_prog = not isfile(configuration["prog_path"])
+ rebuild_prog = not isfile(ide_data["prog_path"])

if preload or (not rebuild_prog and load_mode != "always"):
# don't load firmware through debug server
@@ -139,9 +155,9 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):

# save SHA sum of newly created prog
if load_mode == "modified":
- helpers.is_prog_obsolete(configuration["prog_path"])
+ helpers.is_prog_obsolete(ide_data["prog_path"])

- if not isfile(configuration["prog_path"]):
+ if not isfile(ide_data["prog_path"]):
raise DebugInvalidOptionsError("Program/firmware is missed")

# run debugging client
@@ -151,7 +167,7 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
from platformio.commands.debug.process.client import GDBClient, reactor

client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
- client.spawn(configuration["gdb_path"], configuration["prog_path"])
+ client.spawn(ide_data["gdb_path"], ide_data["prog_path"])

signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
reactor.run()
@@ -23,11 +23,8 @@ from os.path import isfile
from platformio import fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.debug.exception import DebugInvalidOptionsError
- from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import is_bytes
- from platformio.platform.exception import UnknownPlatform
- from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.options import ProjectOptions

@@ -89,21 +86,11 @@ def predebug_project(ctx, project_dir, env_name, preload, verbose):
time.sleep(5)


- def validate_debug_options(cmd_ctx, env_options):
+ def configure_initial_debug_options(platform, env_options):
def _cleanup_cmds(items):
items = ProjectConfig.parse_multi_values(items)
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]

- try:
- platform = PlatformFactory.new(env_options["platform"])
- except UnknownPlatform:
- cmd_ctx.invoke(
- cmd_platform_install,
- platforms=[env_options["platform"]],
- skip_default_package=True,
- )
- platform = PlatformFactory.new(env_options["platform"])
-
board_config = platform.board_config(env_options["board"])
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
@@ -195,13 +182,16 @@ def validate_debug_options(cmd_ctx, env_options):


def configure_esp32_load_cmds(debug_options, configuration):
+ """
+ DEPRECATED: Moved to ESP32 dev-platform
+ See platform.py::configure_debug_options
+ """
+ flash_images = configuration.get("extra", {}).get("flash_images")
ignore_conds = [
debug_options["load_cmds"] != ["load"],
"xtensa-esp32" not in configuration.get("cc_path", ""),
- not configuration.get("flash_extra_images"),
- not all(
- [isfile(item["path"]) for item in configuration.get("flash_extra_images")]
- ),
+ not flash_images,
+ not all([isfile(item["path"]) for item in flash_images]),
]
if any(ignore_conds):
return debug_options["load_cmds"]
@@ -210,7 +200,7 @@ def configure_esp32_load_cmds(debug_options, configuration):
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
path=fs.to_unix_path(item["path"]), offset=item["offset"]
)
- for item in configuration.get("flash_extra_images")
+ for item in flash_images
]
mon_cmds.append(
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
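With these changes the debug command first asks the dev-platform to adjust the options via `configure_debug_options()` and falls back to the now-deprecated `configure_esp32_load_cmds` only when the platform has not implemented the hook. A schematic sketch of that dispatch; the platform classes and flash-image handling below are illustrative, not the real dev-platform implementations:

    class BaseDevPlatform:
        def configure_debug_options(self, initial_debug_options, ide_data):
            # Platforms that have not implemented the hook keep the legacy behaviour.
            raise NotImplementedError

    class FakeEsp32Platform(BaseDevPlatform):
        def configure_debug_options(self, initial_debug_options, ide_data):
            options = dict(initial_debug_options)
            flash_images = ide_data.get("extra", {}).get("flash_images") or []
            if options.get("load_cmds") == ["load"] and flash_images:
                options["load_cmds"] = [
                    'monitor program_esp32 "%s" %s verify' % (img["path"], img["offset"])
                    for img in flash_images
                ]
            return options

    def resolve_debug_options(platform, debug_options, ide_data, legacy_fallback):
        try:
            return platform.configure_debug_options(debug_options, ide_data)
        except NotImplementedError:
            # legacy path, mirroring the <=2.0.0 ESP32 handling kept above
            debug_options["load_cmds"] = legacy_fallback(debug_options, ide_data)
            return debug_options

    opts = resolve_debug_options(
        FakeEsp32Platform(),
        {"load_cmds": ["load"]},
        {"extra": {"flash_images": [{"path": "bootloader.bin", "offset": "0x1000"}]}},
        legacy_fallback=lambda options, data: options["load_cmds"],
    )
    print(opts["load_cmds"])  # ['monitor program_esp32 "bootloader.bin" 0x1000 verify']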
@@ -203,7 +203,9 @@ def device_monitor(**kwargs):  # pylint: disable=too-many-branches
kwargs["port"] = ports[0]["port"]
elif "platform" in project_options and "board" in project_options:
board_hwids = device_helpers.get_board_hwids(
- kwargs["project_dir"], platform, project_options["board"],
+ kwargs["project_dir"],
+ platform,
+ project_options["board"],
)
for item in ports:
for hwid in board_hwids:
@@ -22,13 +22,16 @@ class Timestamp(DeviceMonitorFilter):

def __init__(self, *args, **kwargs):
super(Timestamp, self).__init__(*args, **kwargs)
- self._first_text_received = False
+ self._line_started = False

def rx(self, text):
- if self._first_text_received and "\n" not in text:
+ if self._line_started and "\n" not in text:
return text
timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3]
- if not self._first_text_received:
- self._first_text_received = True
- return "%s > %s" % (timestamp, text)
+ if not self._line_started:
+ self._line_started = True
+ text = "%s > %s" % (timestamp, text)
+ if text.endswith("\n"):
+ self._line_started = False
+ return text[:-1].replace("\n", "\n%s > " % timestamp) + "\n"
return text.replace("\n", "\n%s > " % timestamp)
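The rewritten filter above tracks whether a line is still open, so each output line gets exactly one timestamp even when serial data arrives in fragments (the fix for issue #3712 in the changelog). A standalone sketch of the same prefixing logic, decoupled from `DeviceMonitorFilter` and with a fixed timestamp so it can run anywhere:

    class LinePrefixer:
        """Minimal re-implementation of the rx() logic, for illustration only."""

        def __init__(self, stamp):
            self._stamp = stamp          # the real filter uses datetime.now().strftime(...)
            self._line_started = False

        def rx(self, text):
            if self._line_started and "\n" not in text:
                return text              # mid-line fragment: pass through untouched
            if not self._line_started:
                self._line_started = True
                text = "%s > %s" % (self._stamp, text)
            if text.endswith("\n"):
                self._line_started = False
                return text[:-1].replace("\n", "\n%s > " % self._stamp) + "\n"
            return text.replace("\n", "\n%s > " % self._stamp)

    p = LinePrefixer("10:42:00.123")
    print(repr(p.rx("boot")))       # '10:42:00.123 > boot'  (line opened, stamped once)
    print(repr(p.rx("ing...")))     # 'ing...'               (same line, no extra stamp)
    print(repr(p.rx(" done\n")))    # ' done\n'              (line closed)
    print(repr(p.rx("ready\n")))    # '10:42:00.123 > ready\n'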
@@ -27,7 +27,13 @@ from twisted.internet import utils  # pylint: disable=import-error

from platformio import __main__, __version__, fs
from platformio.commands.home import helpers
- from platformio.compat import PY2, get_filesystem_encoding, is_bytes, string_types
+ from platformio.compat import (
+ PY2,
+ get_filesystem_encoding,
+ get_locale_encoding,
+ is_bytes,
+ string_types,
+ )

try:
from thread import get_ident as thread_get_ident
@@ -95,10 +101,11 @@ class PIOCoreRPC(object):
else:
args[i] = str(arg)

+ options = options or {}
to_json = "--json-output" in args

try:
- if args and args[0] == "remote":
+ if options.get("force_subprocess"):
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
else:
@@ -117,7 +124,7 @@ class PIOCoreRPC(object):
@staticmethod
def _call_inline(args, options):
PIOCoreRPC.setup_multithreading_std_streams()
- cwd = (options or {}).get("cwd") or os.getcwd()
+ cwd = options.get("cwd") or os.getcwd()

def _thread_task():
with fs.cd(cwd):
@@ -143,13 +150,15 @@ class PIOCoreRPC(object):
@staticmethod
def _process_result(result, to_json=False):
out, err, code = result
+ if out and is_bytes(out):
+ out = out.decode(get_locale_encoding())
+ if err and is_bytes(err):
+ err = err.decode(get_locale_encoding())
text = ("%s\n\n%s" % (out, err)).strip()
if code != 0:
raise Exception(text)
if not to_json:
return text
- if is_bytes(out):
- out = out.decode()
try:
return json.loads(out)
except ValueError as e:
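After this change, subprocess output is decoded with the locale encoding before the two streams are combined, and a non-zero exit code raises with both streams attached. A small sketch of that contract; it hard-codes UTF-8 and a plain `RuntimeError` where the real code uses `get_locale_encoding()` and its own error handling:

    import json

    def process_result(result, to_json=False, encoding="utf-8"):
        """Sketch of the (out, err, code) handling shown above."""
        out, err, code = result
        if out and isinstance(out, bytes):
            out = out.decode(encoding)
        if err and isinstance(err, bytes):
            err = err.decode(encoding)
        text = ("%s\n\n%s" % (out, err)).strip()
        if code != 0:
            raise RuntimeError(text)  # both streams end up in the error message
        if not to_json:
            return text
        return json.loads(out)

    print(process_result((b'{"envs": ["uno"]}', "", 0), to_json=True))  # {'envs': ['uno']}
    try:
        process_result(("", b"error: unknown board ID", 1))
    except RuntimeError as exc:
        print(exc)  # error: unknown board ID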
@@ -198,7 +198,9 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
- d = PIOCoreRPC.call(args, options={"cwd": project_dir})
+ d = PIOCoreRPC.call(
+ args, options={"cwd": project_dir, "force_subprocess": True}
+ )
d.addCallback(self._generate_project_main, project_dir, framework)
return d

@@ -291,7 +293,9 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
- d = PIOCoreRPC.call(args, options={"cwd": project_dir})
+ d = PIOCoreRPC.call(
+ args, options={"cwd": project_dir, "force_subprocess": True}
+ )
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
return d

@@ -324,6 +328,8 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
- d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
+ d = PIOCoreRPC.call(
+ args, options={"cwd": new_project_dir, "force_subprocess": True}
+ )
d.addCallback(lambda _: new_project_dir)
return d
@@ -22,11 +22,7 @@ from tabulate import tabulate

from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
- from platformio.commands.lib.helpers import (
- get_builtin_libs,
- is_builtin_lib,
- save_project_libdeps,
- )
+ from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
from platformio.compat import dump_json_to_unicode
from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
from platformio.package.manager.library import LibraryPackageManager
@@ -164,15 +160,8 @@ def lib_install(  # pylint: disable=too-many-arguments,unused-argument
}

elif storage_dir in storage_libdeps:
- builtin_lib_storages = None
for library in storage_libdeps[storage_dir]:
- try:
- lm.install(library, silent=silent, force=force)
- except UnknownPackageError as e:
- if builtin_lib_storages is None:
- builtin_lib_storages = get_builtin_libs()
- if not silent or not is_builtin_lib(builtin_lib_storages, library):
- click.secho("Warning! %s" % e, fg="yellow")
+ lm.install(library, silent=silent, force=force)

if save and installed_pkgs:
_save_deps(ctx, installed_pkgs)
@@ -45,10 +45,11 @@ def get_builtin_libs(storage_names=None):
return items


- def is_builtin_lib(storages, name):
- for storage in storages or []:
- if any(lib.get("name") == name for lib in storage["items"]):
- return True
+ def is_builtin_lib(name, storages=None):
+ for storage in storages or get_builtin_libs():
+ for lib in storage["items"]:
+ if lib.get("name") == name:
+ return True
return False
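After the signature change above, callers pass only the library name and the built-in storages are fetched lazily, while pre-fetched storages can still be supplied for repeated lookups. A hedged sketch of both call styles; the storage layout mimics what `get_builtin_libs()` returns (a list of dicts with an "items" list), and the sample data is invented:

    def get_builtin_libs(storage_names=None):
        # Stand-in data; the real helper scans the frameworks' built-in library storages.
        return [{"name": "framework-arduino", "items": [{"name": "SPI"}, {"name": "Wire"}]}]

    def is_builtin_lib(name, storages=None):
        # Same shape as the new helper: storages are fetched on demand when omitted.
        for storage in storages or get_builtin_libs():
            for lib in storage["items"]:
                if lib.get("name") == name:
                    return True
        return False

    print(is_builtin_lib("SPI"))      # True  -- storages looked up lazily
    print(is_builtin_lib("FooLib"))   # False
    print(is_builtin_lib("Wire", storages=get_builtin_libs()))  # True -- pre-fetched reuse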
@@ -80,15 +81,22 @@ def save_project_libdeps(project_dir, specs, environments=None, action="add"):
if environments and env not in environments:
continue
config.expand_interpolations = False
- lib_deps = []
+ candidates = []
try:
- lib_deps = ignore_deps_by_specs(config.get("env:" + env, "lib_deps"), specs)
+ candidates = ignore_deps_by_specs(
+ config.get("env:" + env, "lib_deps"), specs
+ )
except InvalidProjectConfError:
pass
if action == "add":
- lib_deps.extend(spec.as_dependency() for spec in specs)
- if lib_deps:
- config.set("env:" + env, "lib_deps", lib_deps)
+ candidates.extend(spec.as_dependency() for spec in specs)
+ if candidates:
+ result = []
+ for item in candidates:
+ item = item.strip()
+ if item and item not in result:
+ result.append(item)
+ config.set("env:" + env, "lib_deps", result)
elif config.has_option("env:" + env, "lib_deps"):
config.remove_option("env:" + env, "lib_deps")
config.save()
@@ -34,17 +34,21 @@ def validate_orgname(value):

@cli.command("create", short_help="Create a new organization")
@click.argument(
- "orgname", callback=lambda _, __, value: validate_orgname(value),
+ "orgname",
+ callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
- @click.option("--displayname",)
+ @click.option(
+ "--displayname",
+ )
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
- "The organization `%s` has been successfully created." % orgname, fg="green",
+ "The organization `%s` has been successfully created." % orgname,
+ fg="green",
)


@@ -121,12 +125,19 @@ def account_destroy(orgname):
abort=True,
)
client.destroy_org(orgname)
- return click.secho("Organization `%s` has been destroyed." % orgname, fg="green",)
+ return click.secho(
+ "Organization `%s` has been destroyed." % orgname,
+ fg="green",
+ )


@cli.command("add", short_help="Add a new owner to organization")
- @click.argument("orgname",)
- @click.argument("username",)
+ @click.argument(
+ "orgname",
+ )
+ @click.argument(
+ "username",
+ )
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
@@ -138,8 +149,12 @@ def org_add_owner(orgname, username):


@cli.command("remove", short_help="Remove an owner from organization")
- @click.argument("orgname",)
- @click.argument("username",)
+ @click.argument(
+ "orgname",
+ )
+ @click.argument(
+ "username",
+ )
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
@@ -13,11 +13,14 @@
# limitations under the License.

import os
+ import tempfile
from datetime import datetime

import click

+ from platformio import fs
from platformio.clients.registry import RegistryClient
+ from platformio.compat import ensure_python3
from platformio.package.meta import PackageSpec, PackageType
from platformio.package.pack import PackagePacker

@@ -77,13 +80,16 @@ def package_pack(package, output):
help="Notify by email when package is processed",
)
def package_publish(package, owner, released_at, private, notify):
- p = PackagePacker(package)
- archive_path = p.pack()
- response = RegistryClient().publish_package(
- archive_path, owner, released_at, private, notify
- )
- os.remove(archive_path)
- click.secho(response.get("message"), fg="green")
+ assert ensure_python3()
+ with tempfile.TemporaryDirectory() as tmp_dir:  # pylint: disable=no-member
+ with fs.cd(tmp_dir):
+ p = PackagePacker(package)
+ archive_path = p.pack()
+ response = RegistryClient().publish_package(
+ archive_path, owner, released_at, private, notify
+ )
+ os.remove(archive_path)
+ click.secho(response.get("message"), fg="green")


@cli.command("unpublish", short_help="Remove a pushed package from the registry")
@@ -107,7 +113,7 @@ def package_unpublish(package, type, undo):  # pylint: disable=redefined-builtin
type=type,
name=spec.name,
owner=spec.owner,
- version=spec.requirements,
+ version=str(spec.requirements),
undo=undo,
)
click.secho(response.get("message"), fg="green")
@@ -174,8 +174,10 @@ def project_init(
if is_new_project:
init_base_project(project_dir)

- if board:
- fill_project_envs(
+ if environment:
+ update_project_env(project_dir, environment, project_option)
+ elif board:
+ update_board_envs(
ctx, project_dir, board, project_option, env_prefix, ide is not None
)

@@ -358,7 +360,7 @@ def init_cvs_ignore(project_dir):
fp.write(".pio\n")


- def fill_project_envs(
+ def update_board_envs(
ctx, project_dir, board_ids, project_option, env_prefix, force_download
):
config = ProjectConfig(
@@ -417,6 +419,26 @@ def _install_dependent_platforms(ctx, platforms):
)


+ def update_project_env(project_dir, environment, project_option):
+ if not project_option:
+ return
+ config = ProjectConfig(
+ os.path.join(project_dir, "platformio.ini"), parse_extra=False
+ )
+
+ section = "env:%s" % environment
+ if not config.has_section(section):
+ config.add_section(section)
+
+ for item in project_option:
+ if "=" not in item:
+ continue
+ _name, _value = item.split("=", 1)
+ config.set(section, _name.strip(), _value.strip())
+
+ config.save()
+
+
def get_best_envname(config, board_ids=None):
envname = None
default_envs = config.default_envs()
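The new `update_project_env` above writes each "name=value" project option into the `[env:<name>]` section of `platformio.ini`, which is what the 5.0.2 changelog entry about updating an existing environment refers to. A rough standalone sketch using the stdlib `configparser` instead of `ProjectConfig`; the environment name and option values below are just examples:

    import configparser

    def update_env_section(ini_path, environment, project_options):
        """Sketch of the behaviour of the new helper shown above."""
        config = configparser.ConfigParser()
        config.read(ini_path)                  # a missing file is simply treated as empty
        section = "env:%s" % environment
        if not config.has_section(section):
            config.add_section(section)
        for item in project_options:
            if "=" not in item:
                continue                       # ignore malformed "name=value" pairs
            name, value = item.split("=", 1)
            config.set(section, name.strip(), value.strip())
        with open(ini_path, "w") as fp:
            config.write(fp)

    # An init/update call that passes an environment name plus project options
    # (hypothetical values) ends up with a section roughly like this:
    update_env_section("platformio.ini", "uno", ["platform=atmelavr", "board=uno"])
    print(open("platformio.ini").read())
    # [env:uno]
    # platform = atmelavr
    # board = uno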
@@ -23,12 +23,12 @@ from time import sleep

import click

- from platformio import exception, fs, proc
+ from platformio import fs, proc
from platformio.commands.device import helpers as device_helpers
from platformio.commands.device.command import device_monitor as cmd_device_monitor
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.test.command import cli as cmd_test
- from platformio.compat import PY2
+ from platformio.compat import ensure_python3
from platformio.package.manager.core import inject_contrib_pysite
from platformio.project.exception import NotPlatformIOProjectError

@@ -37,13 +37,7 @@ from platformio.project.exception import NotPlatformIOProjectError
@click.option("-a", "--agent", multiple=True)
@click.pass_context
def cli(ctx, agent):
- if PY2:
- raise exception.UserSideException(
- "PlatformIO Remote Development requires Python 3.5 or above. \n"
- "Please install the latest Python 3 and reinstall PlatformIO Core using "
- "installation script:\n"
- "https://docs.platformio.org/page/core/installation.html"
- )
+ assert ensure_python3()
ctx.obj = agent
inject_contrib_pysite(verify_openssl=True)
@@ -45,7 +45,10 @@ class RemoteClientFactory(pb.PBClientFactory, protocol.ReconnectingClientFactory):
return d

d = self.login(
- credentials.UsernamePassword(auth_token.encode(), get_host_id().encode(),),
+ credentials.UsernamePassword(
+ auth_token.encode(),
+ get_host_id().encode(),
+ ),
client=self.remote_client,
)
d.addCallback(self.remote_client.cb_client_authorization_made)
@@ -63,13 +63,16 @@ def cli():
value, teamname_validate=True
),
)
- @click.option("--description",)
+ @click.option(
+ "--description",
+ )
def team_create(orgname_teamname, description):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.create_team(orgname, teamname, description)
return click.secho(
- "The team %s has been successfully created." % teamname, fg="green",
+ "The team %s has been successfully created." % teamname,
+ fg="green",
)


@@ -123,7 +126,9 @@ def team_list(orgname, json_output):
callback=lambda _, __, value: validate_teamname(value),
help="A new team name",
)
- @click.option("--description",)
+ @click.option(
+ "--description",
+ )
def team_update(orgname_teamname, **kwargs):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
@@ -142,7 +147,8 @@ def team_update(orgname_teamname, **kwargs):
new_team.update({key: value for key, value in kwargs.items() if value})
client.update_team(orgname, teamname, new_team)
return click.secho(
- "The team %s has been successfully updated." % teamname, fg="green",
+ "The team %s has been successfully updated." % teamname,
+ fg="green",
)


@@ -163,7 +169,8 @@ def team_destroy(orgname_teamname):
client = AccountClient()
client.destroy_team(orgname, teamname)
return click.secho(
- "The team %s has been successfully destroyed." % teamname, fg="green",
+ "The team %s has been successfully destroyed." % teamname,
+ fg="green",
)


@@ -173,7 +180,9 @@ def team_destroy(orgname_teamname):
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(value),
)
- @click.argument("username",)
+ @click.argument(
+ "username",
+ )
def team_add_member(orgname_teamname, username):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
@@ -23,9 +23,12 @@ import os
import re
import sys

+ from platformio.exception import UserSideException
+
PY2 = sys.version_info[0] == 2
CYGWIN = sys.platform.startswith("cygwin")
WINDOWS = sys.platform.startswith("win")
+ MACOS = sys.platform.startswith("darwin")


def get_filesystem_encoding():
@@ -58,6 +61,17 @@ def ci_strings_are_equal(a, b):
return a.strip().lower() == b.strip().lower()


+ def ensure_python3(raise_exception=True):
+ if not raise_exception or not PY2:
+ return not PY2
+ raise UserSideException(
+ "Python 3.5 or later is required for this operation. \n"
+ "Please install the latest Python 3 and reinstall PlatformIO Core using "
+ "installation script:\n"
+ "https://docs.platformio.org/page/core/installation.html"
+ )
+
+
if PY2:
import imp
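Elsewhere in this changeset (the remote and package-publish commands), Python-3-only entry points simply assert this helper; with `raise_exception=False` it doubles as a soft capability check. A short sketch of both uses, assuming PlatformIO Core 5.x is importable:

    from platformio.compat import ensure_python3

    def python3_only_command():
        assert ensure_python3()  # raises UserSideException with install hints on Python 2
        # ... the rest of the command only ever runs on Python 3 ...

    def runtime_label():
        # Non-raising form: report the capability instead of failing.
        return "py3" if ensure_python3(raise_exception=False) else "py2"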
@@ -84,7 +98,7 @@ if PY2:
if isinstance(obj, unicode):
return obj
return json.dumps(
- obj, encoding=get_filesystem_encoding(), ensure_ascii=False, sort_keys=True
+ obj, encoding=get_filesystem_encoding(), ensure_ascii=False
).encode("utf8")

_magic_check = re.compile("([*?[])")
@@ -132,7 +146,7 @@ else:
def dump_json_to_unicode(obj):
if isinstance(obj, string_types):
return obj
- return json.dumps(obj, ensure_ascii=False, sort_keys=True)
+ return json.dumps(obj)

def glob_recursive(pathname):
return glob.glob(pathname, recursive=True)
@@ -63,7 +63,7 @@ SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
SET(CMAKE_CXX_FLAGS "{{ _normalize_path(to_unix_path(cxx_flags)) }}")
SET(CMAKE_C_FLAGS "{{ _normalize_path(to_unix_path(cc_flags)) }}")

- % STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
+ % STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
% if cc_stds:

@@ -1,5 +1,5 @@
% import re
- % STD_RE = re.compile(r"(\-std=[a-z\+]+\d+)")
+ % STD_RE = re.compile(r"(\-std=[a-z\+]+\w+)")
% cxx_stds = STD_RE.findall(cxx_flags)
% cxx_std = cxx_stds[-1] if cxx_stds else ""
%

@@ -1,5 +1,5 @@
% import re
- % STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
+ % STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%

@@ -27,3 +27,5 @@ HEADERS += {{file}}
SOURCES += {{file}}
% end
% end
+
+ CONFIG += c++17

@@ -1,5 +1,5 @@
% import re
- % STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
+ % STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%

@@ -18,5 +18,5 @@ clang
% end

% for define in defines:
- -D{{ define }}
+ -D{{ !define }}
% end
@@ -6,6 +6,14 @@
%
% systype = platform.system().lower()
%
+ % cpp_standards_remap = {
+ %   "0x": "11",
+ %   "1y": "14",
+ %   "1z": "17",
+ %   "2a": "20",
+ %   "2b": "23"
+ % }
+ %
% def _escape(text):
%   return to_unix_path(text).replace('"', '\\"')
% end
@@ -68,7 +76,7 @@
%
% cleaned_includes = filter_includes(includes, ["toolchain"])
%
- % STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
+ % STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
% cc_m_flags = split_args(cc_flags)
@@ -115,7 +123,7 @@
"cStandard": "c{{ cc_stds[-1] }}",
% end
% if cxx_stds:
- "cppStandard": "c++{{ cxx_stds[-1] }}",
+ "cppStandard": "c++{{ cpp_standards_remap.get(cxx_stds[-1], cxx_stds[-1]) }}",
% end
% if forced_includes:
"forcedInclude": [
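Taken together, the widened `STD_RE` pattern and the new `cpp_standards_remap` table let the template turn draft-standard flags such as `-std=gnu++2a` into the numeric names that c_cpp_properties.json expects. A quick Python check of that mapping, mirroring the template logic outside the template engine:

    import re

    STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")   # the old (\d+) pattern missed 0x/1y/1z/2a/2b
    cpp_standards_remap = {"0x": "11", "1y": "14", "1z": "17", "2a": "20", "2b": "23"}

    def cpp_standard(cxx_flags):
        stds = STD_RE.findall(cxx_flags)
        if not stds:
            return None
        last = stds[-1]                           # the last -std flag wins
        return "c++" + cpp_standards_remap.get(last, last)

    print(cpp_standard("-Os -std=gnu++17"))   # c++17
    print(cpp_standard("-std=gnu++2a"))       # c++20  (remapped draft name)
    print(cpp_standard("-std=c++0x"))         # c++11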
@@ -124,7 +124,9 @@ class Upgrader(object):
continue
result = result[0]
pkg.metadata.spec = PackageSpec(
- id=result["id"], owner=result["owner"]["username"], name=result["name"],
+ id=result["id"],
+ owner=result["owner"]["username"],
+ name=result["name"],
)
pkg.dump_meta()
return True
@@ -152,7 +152,10 @@ class PackageManagerInstallMixin(object):
return self._install_tmp_pkg(pkg_item)
finally:
if os.path.isdir(tmp_dir):
- fs.rmtree(tmp_dir)
+ try:
+ shutil.rmtree(tmp_dir)
+ except:  # pylint: disable=bare-except
+ pass

def _install_tmp_pkg(self, tmp_pkg):
assert isinstance(tmp_pkg, PackageItem)
@@ -213,10 +216,10 @@ class PackageManagerInstallMixin(object):
# move existing into the new place
pkg_dir = os.path.join(self.package_dir, target_dirname)
_cleanup_dir(pkg_dir)
- shutil.move(dst_pkg.path, pkg_dir)
+ shutil.copytree(dst_pkg.path, pkg_dir, symlinks=True)
# move new source to the destination location
_cleanup_dir(dst_pkg.path)
- shutil.move(tmp_pkg.path, dst_pkg.path)
+ shutil.copytree(tmp_pkg.path, dst_pkg.path, symlinks=True)
return PackageItem(dst_pkg.path)

if action == "detach-new":
@@ -233,10 +236,10 @@ class PackageManagerInstallMixin(object):
)
pkg_dir = os.path.join(self.package_dir, target_dirname)
_cleanup_dir(pkg_dir)
- shutil.move(tmp_pkg.path, pkg_dir)
+ shutil.copytree(tmp_pkg.path, pkg_dir, symlinks=True)
return PackageItem(pkg_dir)

# otherwise, overwrite existing
_cleanup_dir(dst_pkg.path)
- shutil.move(tmp_pkg.path, dst_pkg.path)
+ shutil.copytree(tmp_pkg.path, dst_pkg.path, symlinks=True)
return PackageItem(dst_pkg.path)
@@ -51,7 +51,7 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods

def __init__(self, pkg_type, package_dir):
self.pkg_type = pkg_type
- self.package_dir = self.ensure_dir_exists(package_dir)
+ self.package_dir = package_dir
self._MEMORY_CACHE = {}

self._lockfile = None
@@ -62,7 +62,9 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods
def lock(self):
if self._lockfile:
return
+ self.ensure_dir_exists(os.path.dirname(self.package_dir))
self._lockfile = LockFile(self.package_dir)
+ self.ensure_dir_exists(self.package_dir)
self._lockfile.acquire()

def unlock(self):
@@ -190,6 +192,9 @@ class BasePackageManager(  # pylint: disable=too-many-public-methods
return metadata

def get_installed(self):
+ if not os.path.isdir(self.package_dir):
+ return []
+
cache_key = "get_installed"
if self.memcache_get(cache_key):
return self.memcache_get(cache_key)
@@ -14,13 +14,16 @@

import json
import os
import shutil
import subprocess
import sys
from datetime import date

from platformio import __core_packages__, exception, fs, util
from platformio.compat import PY2
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.tool import ToolPackageManager
from platformio.package.meta import PackageSpec
from platformio.package.meta import PackageItem, PackageSpec
from platformio.proc import get_pythonexe_path

@@ -73,9 +76,17 @@ def inject_contrib_pysite(verify_openssl=False):
# pylint: disable=import-outside-toplevel
from site import addsitedir

contrib_pysite_dir = get_core_package_dir("contrib-pysite")
try:
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
except UnknownPackageError:
pm = ToolPackageManager()
contrib_pysite_dir = build_contrib_pysite_package(
os.path.join(pm.package_dir, "contrib-pysite")
)

if contrib_pysite_dir in sys.path:
return True

addsitedir(contrib_pysite_dir)
sys.path.insert(0, contrib_pysite_dir)
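The updated inject_contrib_pysite() falls back to building the package on the host when the registry has no prebuilt "contrib-pysite", then prepends the directory to sys.path. A reduced sketch of that flow, where registry_lookup() and build_locally() are stand-ins for the functions shown above:

# Sketch: resolve a vendored site-packages directory, building it locally
# if the registry lookup fails, then make it importable by prepending it
# to sys.path (as the new code does). All names here are illustrative.
import os
import sys


class UnknownPackageError(Exception):
    pass


def registry_lookup(name):
    raise UnknownPackageError(name)       # simulate a missing prebuilt package


def build_locally(name):
    target = os.path.join("/tmp", name)   # assumed location, for the sketch only
    os.makedirs(target, exist_ok=True)
    return target


def inject(name="contrib-pysite"):
    try:
        pysite_dir = registry_lookup(name)
    except UnknownPackageError:
        pysite_dir = build_locally(name)
    if pysite_dir not in sys.path:
        sys.path.insert(0, pysite_dir)
    return pysite_dir


print(inject())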
@@ -86,41 +97,92 @@ def inject_contrib_pysite(verify_openssl=False):
# pylint: disable=import-error,unused-import,unused-variable
from OpenSSL import SSL
except: # pylint: disable=bare-except
build_contrib_pysite_deps(get_core_package_dir("contrib-pysite"))
build_contrib_pysite_package(contrib_pysite_dir)

return True


def build_contrib_pysite_deps(target_dir):
def build_contrib_pysite_package(target_dir, with_metadata=True):
systype = util.get_systype()
if os.path.isdir(target_dir):
fs.rmtree(target_dir)
os.makedirs(target_dir)

# build dependencies
args = [
get_pythonexe_path(),
"-m",
"pip",
"install",
"--no-compile",
"-t",
target_dir,
]
if "linux" in systype:
args.extend(["--no-binary", ":all:"])
for dep in get_contrib_pysite_deps():
subprocess.check_call(args + [dep])
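Building the extra dependencies boils down to invoking pip with a target directory; on Linux the change forces source builds so native extensions match the host. A self-contained sketch of that invocation (the package list is an example, not the real get_contrib_pysite_deps() output):

# Sketch: vendor third-party packages into a standalone directory with
# "pip install -t <dir>", forcing source builds on Linux as above.
import os
import subprocess
import sys


def vendor_deps(target_dir, deps, force_source=False):
    args = [sys.executable, "-m", "pip", "install", "--no-compile", "-t", target_dir]
    if force_source:
        args.extend(["--no-binary", ":all:"])
    for dep in deps:
        subprocess.check_call(args + [dep])


if __name__ == "__main__":
    out = os.path.join("build", "pysite-demo")
    os.makedirs(out, exist_ok=True)
    # example dependency; the real list comes from get_contrib_pysite_deps()
    vendor_deps(out, ["json-rpc == 1.13.0"], force_source=sys.platform.startswith("linux"))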
# build manifests
with open(os.path.join(target_dir, "package.json"), "w") as fp:
json.dump(
dict(
name="contrib-pysite",
version="2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
system=util.get_systype(),
version="2.%d%d.%s"
% (
sys.version_info.major,
sys.version_info.minor,
date.today().strftime("%y%m%d"),
),
system=list(
set([systype, "linux_armv6l", "linux_armv7l", "linux_armv8l"])
)
if systype.startswith("linux_arm")
else systype,
description="Extra Python package for PlatformIO Core",
keywords=["platformio", "platformio-core"],
homepage="https://docs.platformio.org/page/core/index.html",
repository={
"type": "git",
"url": "https://github.com/platformio/platformio-core",
},
),
fp,
)

pythonexe = get_pythonexe_path()
for dep in get_contrib_pysite_deps():
subprocess.check_call(
[
pythonexe,
"-m",
"pip",
"install",
# "--no-cache-dir",
"--no-compile",
"-t",
target_dir,
dep,
]
# generate package metadata
if with_metadata:
pm = ToolPackageManager()
pkg = PackageItem(target_dir)
pkg.metadata = pm.build_metadata(
target_dir, PackageSpec(owner="platformio", name="contrib-pysite")
)
return True
pkg.dump_meta()

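The manifest now derives the package version from the running interpreter plus the build date. A small sketch of the version scheme; the exact value depends on the host:

# Sketch: "2.<major><minor>.<yymmdd>" version string, as generated for the
# contrib-pysite manifest above. On Python 3.8 built on 2020-10-30 this
# yields "2.38.201030".
import sys
from datetime import date

version = "2.%d%d.%s" % (
    sys.version_info.major,
    sys.version_info.minor,
    date.today().strftime("%y%m%d"),
)
print(version)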
# remove unused files
shutil.rmtree(os.path.join(target_dir, "autobahn", "xbr", "contracts"))
for root, dirs, files in os.walk(target_dir):
for t in ("_test", "test", "tests"):
if t in dirs:
shutil.rmtree(os.path.join(root, t))
for name in files:
if name.endswith((".chm", ".pyc")):
os.remove(os.path.join(root, name))

# apply patches
with open(
os.path.join(target_dir, "autobahn", "twisted", "__init__.py"), "r+"
) as fp:
contents = fp.read()
contents = contents.replace(
"from autobahn.twisted.wamp import ApplicationSession",
"# from autobahn.twisted.wamp import ApplicationSession",
)
fp.seek(0)
fp.truncate()
fp.write(contents)

return target_dir


def get_contrib_pysite_deps():
@@ -130,7 +192,7 @@ def get_contrib_pysite_deps():
twisted_version = "19.10.0" if PY2 else "20.3.0"
result = [
"twisted == %s" % twisted_version,
"autobahn == 20.4.3",
"autobahn == %s" % ("19.11.2" if PY2 else "20.7.1"),
"json-rpc == 1.13.0",
]

@@ -151,8 +213,8 @@ def get_contrib_pysite_deps():
result.append("pypiwin32 == 223")
# workaround for twisted wheels
twisted_wheel = (
"https://download.lfd.uci.edu/pythonlibs/g5apjq5m/Twisted-"
"%s-cp%s-cp%sm-win%s.whl"
"https://download.lfd.uci.edu/pythonlibs/x2tqcw5k/Twisted-"
"%s-cp%s-cp%s-win%s.whl"
% (
twisted_version,
py_version,
@@ -15,7 +15,10 @@
import json
import os

from platformio.package.exception import MissingPackageManifestError
from platformio.package.exception import (
MissingPackageManifestError,
UnknownPackageError,
)
from platformio.package.manager.base import BasePackageManager
from platformio.package.meta import PackageItem, PackageSpec, PackageType
from platformio.project.helpers import get_project_global_lib_dir
@@ -43,7 +46,10 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
# automatically generate library manifest
with open(os.path.join(root_dir, "library.json"), "w") as fp:
json.dump(
dict(name=spec.name, version=self.generate_rand_version(),),
dict(
name=spec.name,
version=self.generate_rand_version(),
),
fp,
indent=2,
)
@@ -63,6 +69,33 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
return root
return path

def _install( # pylint: disable=too-many-arguments
self,
spec,
search_filters=None,
silent=False,
skip_dependencies=False,
force=False,
):
try:
return super(LibraryPackageManager, self)._install(
spec,
search_filters=search_filters,
silent=silent,
skip_dependencies=skip_dependencies,
force=force,
)
except UnknownPackageError as e:
# pylint: disable=import-outside-toplevel
from platformio.commands.lib.helpers import is_builtin_lib

spec = self.ensure_spec(spec)
if is_builtin_lib(spec.name):
self.print_message("Already installed, built-in library", fg="yellow")
return True

raise e

def install_dependencies(self, pkg, silent=False):
assert isinstance(pkg, PackageItem)
manifest = self.load_manifest(pkg)
@@ -79,9 +112,16 @@ class LibraryPackageManager(BasePackageManager): # pylint: disable=too-many-anc
)

def _install_dependency(self, dependency, silent=False):
spec = PackageSpec(
name=dependency.get("name"), requirements=dependency.get("version")
)
if set(["name", "version"]) <= set(dependency.keys()) and any(
c in dependency["version"] for c in (":", "/", "@")
):
spec = PackageSpec("%s=%s" % (dependency["name"], dependency["version"]))
else:
spec = PackageSpec(
owner=dependency.get("owner"),
name=dependency.get("name"),
requirements=dependency.get("version"),
)
search_filters = {
key: value
for key, value in dependency.items()
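The reworked _install_dependency() treats a "version" that contains ":", "/" or "@" as an external source and folds it into a single spec string; otherwise it builds a registry spec from owner/name/requirements. A compact sketch of that branching, where build_spec() is a stand-in for the PackageSpec construction above:

# Sketch: decide how a library.json dependency entry becomes a package spec.
# External URLs / VCS references pass through as "<name>=<version>",
# registry dependencies keep owner/name/requirements separate.
def build_spec(dependency):
    version = dependency.get("version", "")
    if {"name", "version"} <= set(dependency) and any(
        c in version for c in (":", "/", "@")
    ):
        return "%s=%s" % (dependency["name"], version)
    return {
        "owner": dependency.get("owner"),
        "name": dependency.get("name"),
        "requirements": version or None,
    }


print(build_spec({"owner": "bblanchon", "name": "ArduinoJson", "version": "^6.16.1"}))
print(build_spec({
    "name": "external-repo",
    "version": "https://github.com/milesburton/Arduino-Temperature-Control-Library.git#4a0ccc1",
}))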
@@ -119,12 +119,13 @@ class ManifestParserFactory(object):
assert path.endswith("tar.gz")
with tarfile.open(path, mode="r:gz") as tf:
for t in sorted(ManifestFileType.items().values()):
try:
return ManifestParserFactory.new(
tf.extractfile(t).read().decode(), t
)
except KeyError:
pass
for member in (t, "./" + t):
try:
return ManifestParserFactory.new(
tf.extractfile(member).read().decode(), t
)
except KeyError:
pass
raise UnknownManifestError("Unknown manifest file type in %s archive" % path)

@staticmethod

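The extra loop over (t, "./" + t) copes with archives whose members are stored with a leading "./" path component. A small sketch of the same lookup against a freshly created in-memory archive:

# Sketch: some tarballs store "library.json", others "./library.json";
# trying both spellings (as the parser above now does) finds either form.
import io
import tarfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w:gz") as tf:
    data = b'{"name": "demo", "version": "1.0.0"}'
    info = tarfile.TarInfo("./library.json")       # note the "./" prefix
    info.size = len(data)
    tf.addfile(info, io.BytesIO(data))

buf.seek(0)
with tarfile.open(fileobj=buf, mode="r:gz") as tf:
    manifest = None
    for member in ("library.json", "./library.json"):
        try:
            manifest = tf.extractfile(member).read().decode()
            break
        except KeyError:
            pass
print(manifest)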
@@ -388,7 +389,15 @@ class LibraryJsonManifestParser(BaseManifestParser):
raw = [raw]

if isinstance(raw, dict):
return [dict(name=name, version=version) for name, version in raw.items()]
result = []
for name, version in raw.items():
if "/" in name:
owner, name = name.split("/", 1)
result.append(dict(owner=owner, name=name, version=version))
else:
result.append(dict(name=name, version=version))
return result

if isinstance(raw, list):
for i, dependency in enumerate(raw):
if isinstance(dependency, dict):

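With this change, a dictionary-style "dependencies" block may key an entry as "owner/name"; the parser splits the owner off instead of keeping it inside the name. A short sketch of the new behaviour:

# Sketch of the new dict-form parsing: "owner/name" keys are split into an
# explicit owner field, plain names stay as before.
def parse_dependencies(raw):
    result = []
    for name, version in raw.items():
        if "/" in name:
            owner, name = name.split("/", 1)
            result.append(dict(owner=owner, name=name, version=version))
        else:
            result.append(dict(name=name, version=version))
    return result


print(parse_dependencies({
    "deps1": "1.2.0",
    "owner/deps3": "^2.1.3",
}))
# -> [{'name': 'deps1', 'version': '1.2.0'},
#     {'owner': 'owner', 'name': 'deps3', 'version': '^2.1.3'}]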
@@ -106,6 +106,7 @@ class RepositorySchema(StrictSchema):


class DependencySchema(StrictSchema):
owner = fields.Str(validate=validate.Length(min=1, max=100))
name = fields.Str(required=True, validate=validate.Length(min=1, max=100))
version = fields.Str(validate=validate.Length(min=1, max=100))
authors = StrictListField(fields.Str(validate=validate.Length(min=1, max=50)))
@@ -242,7 +243,7 @@ class ManifestSchema(BaseSchema):
raise ValidationError("Could not load SPDX licenses for validation")
for item in spdx.get("licenses", []):
if item.get("licenseId") == value:
return
return True
raise ValidationError(
"Invalid SPDX license identifier. See valid identifiers at "
"https://spdx.org/licenses/"
@@ -251,9 +252,5 @@ class ManifestSchema(BaseSchema):
@staticmethod
@memoized(expire="1h")
def load_spdx_licenses():
version = "3.10"
spdx_data_url = (
"https://raw.githubusercontent.com/spdx/license-list-data"
"/v%s/json/licenses.json" % version
)
spdx_data_url = "https://dl.bintray.com/platformio/dl-misc/spdx-licenses-3.json"
return json.loads(fetch_remote_content(spdx_data_url))

@@ -209,6 +209,7 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes
raw = raw.strip()

parsers = (
self._parse_local_file,
self._parse_requirements,
self._parse_custom_name,
self._parse_id,
@@ -227,10 +228,16 @@ class PackageSpec(object): # pylint: disable=too-many-instance-attributes
# the leftover is a package name
self.name = raw

def _parse_requirements(self, raw):
if "@" not in raw:
@staticmethod
def _parse_local_file(raw):
if raw.startswith("file://") or not any(c in raw for c in ("/", "\\")):
return raw
if raw.startswith("file://") and os.path.exists(raw[7:]):
if os.path.exists(raw):
return "file://%s" % raw
return raw

def _parse_requirements(self, raw):
if "@" not in raw or raw.startswith("file://"):
return raw
tokens = raw.rsplit("@", 1)
if any(s in tokens[1] for s in (":", "/")):

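The new _parse_local_file step means a bare filesystem path is recognized as a local package even when the folder name contains "@", which previously confused the requirements parser. A simplified sketch of the intended behaviour (not the full PackageSpec implementation):

# Sketch: normalize a raw spec into a "file://" URL when it points at an
# existing local path, so later "@" handling is skipped for it.
import os


def parse_local_file(raw):
    if raw.startswith("file://") or not any(c in raw for c in ("/", "\\")):
        return raw                      # already a URL, or a plain name
    if os.path.exists(raw):
        return "file://%s" % raw        # local path, even one like "detached@1.2.3"
    return raw


def parse_requirements(raw):
    # reduced version: the real code also ignores "@" inside URL-like tokens
    if "@" not in raw or raw.startswith("file://"):
        return raw, None
    name, req = raw.rsplit("@", 1)
    return name, req


spec = parse_local_file(os.getcwd())        # an existing directory
print(spec.startswith("file://"))           # True: treated as a local package

print(parse_requirements("bblanchon/ArduinoJson@^6.16.1"))
# ('bblanchon/ArduinoJson', '^6.16.1')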
@ -20,6 +20,7 @@ import tarfile
|
||||
import tempfile
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import ensure_python3
|
||||
from platformio.package.exception import PackageException
|
||||
from platformio.package.manifest.parser import ManifestFileType, ManifestParserFactory
|
||||
from platformio.package.manifest.schema import ManifestSchema
|
||||
@ -28,20 +29,70 @@ from platformio.package.unpack import FileUnpacker
|
||||
|
||||
|
||||
class PackagePacker(object):
|
||||
INCLUDE_DEFAULT = ManifestFileType.items().values()
|
||||
EXCLUDE_DEFAULT = [
|
||||
# PlatformIO internal files
|
||||
PackageItem.METAFILE_NAME,
|
||||
".pio/",
|
||||
"**/.pio/",
|
||||
# Hidden files
|
||||
"._*",
|
||||
"__*",
|
||||
".DS_Store",
|
||||
".vscode",
|
||||
".cache",
|
||||
"**/.cache",
|
||||
# VCS
|
||||
".git/",
|
||||
".hg/",
|
||||
".svn/",
|
||||
".pio/",
|
||||
"**/.pio/",
|
||||
PackageItem.METAFILE_NAME,
|
||||
# Tests
|
||||
"tests?",
|
||||
# Docs
|
||||
"doc",
|
||||
"docs",
|
||||
"mkdocs",
|
||||
"**/*.[pP][dD][fF]",
|
||||
"**/*.[dD][oO][cC]?",
|
||||
"**/*.[pP][pP][tT]?",
|
||||
"**/*.[dD][oO][xX]",
|
||||
"**/*.[hH][tT][mM]?",
|
||||
"**/*.[tT][eE][xX]",
|
||||
"**/*.[jJ][sS]",
|
||||
"**/*.[cC][sS][sS]",
|
||||
# Binary files
|
||||
"**/*.[jJ][pP][gG]",
|
||||
"**/*.[jJ][pP][eE][gG]",
|
||||
"**/*.[pP][nN][gG]",
|
||||
"**/*.[gG][iI][fF]",
|
||||
"**/*.[zZ][iI][pP]",
|
||||
"**/*.[gG][zZ]",
|
||||
"**/*.3[gG][pP]",
|
||||
"**/*.[mM][oO][vV]",
|
||||
"**/*.[mM][pP][34]",
|
||||
"**/*.[pP][sS][dD]",
|
||||
"**/*.[wW][aA][wW]",
|
||||
]
|
||||
EXCLUDE_LIBRARY_EXTRA = [
|
||||
"assets",
|
||||
"extra",
|
||||
"resources",
|
||||
"html",
|
||||
"media",
|
||||
"doxygen",
|
||||
"**/build/",
|
||||
"**/*.flat",
|
||||
"**/*.[jJ][aA][rR]",
|
||||
"**/*.[eE][xX][eE]",
|
||||
"**/*.[bB][iI][nN]",
|
||||
"**/*.[hH][eE][xX]",
|
||||
"**/*.[dD][bB]",
|
||||
"**/*.[dD][aA][tT]",
|
||||
"**/*.[dD][lL][lL]",
|
||||
]
|
||||
INCLUDE_DEFAULT = ManifestFileType.items().values()
|
||||
|
||||
def __init__(self, package, manifest_uri=None):
assert ensure_python3()
self.package = package
self.manifest_uri = manifest_uri

@@ -51,7 +102,9 @@ class PackagePacker(object):
r"[^\da-zA-Z\-\._\+]+",
"",
"{name}{system}-{version}.tar.gz".format(
name=name, system=("-" + system) if system else "", version=version,
name=name,
system=("-" + system) if system else "",
version=version,
),
)

@@ -128,16 +181,28 @@ class PackagePacker(object):
json.dump(manifest_updated, fp, indent=2, ensure_ascii=False)
include = None

src_filters = self.compute_src_filters(include, exclude)
src_filters = self.compute_src_filters(src, include, exclude)
with tarfile.open(dst, "w:gz") as tar:
for f in fs.match_src_files(src, src_filters, followlinks=False):
tar.add(os.path.join(src, f), f)
return dst

def compute_src_filters(self, include, exclude):
def compute_src_filters(self, src, include, exclude):
exclude_default = self.EXCLUDE_DEFAULT[:]
# extend with library extra filters
if any(
os.path.isfile(os.path.join(src, name))
for name in (
ManifestFileType.LIBRARY_JSON,
ManifestFileType.LIBRARY_PROPERTIES,
ManifestFileType.MODULE_JSON,
)
):
exclude_default.extend(self.EXCLUDE_LIBRARY_EXTRA)

result = ["+<%s>" % p for p in include or ["*", ".*"]]
result += ["-<%s>" % p for p in exclude or []]
result += ["-<%s>" % p for p in self.EXCLUDE_DEFAULT]
result += ["-<%s>" % p for p in exclude_default]
# automatically include manifests
result += ["+<%s>" % p for p in self.INCLUDE_DEFAULT]
return result

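compute_src_filters() now receives the package root so the library-only exclude patterns are applied only when a library manifest is present; the result is the usual list of "+<…>" / "-<…>" glob filters. A reduced sketch of how the filter list is assembled, with trimmed example pattern tables instead of the full EXCLUDE_DEFAULT / EXCLUDE_LIBRARY_EXTRA lists:

# Sketch: assemble packaging filters the way compute_src_filters() does.
# The pattern lists below are abbreviated examples, not the real tables.
import os

EXCLUDE_DEFAULT = [".git/", ".pio/", "**/*.gz"]
EXCLUDE_LIBRARY_EXTRA = ["docs", "**/build/"]
INCLUDE_DEFAULT = ["library.json", "library.properties", "module.json"]


def compute_src_filters(src, include=None, exclude=None):
    exclude_default = EXCLUDE_DEFAULT[:]
    if any(os.path.isfile(os.path.join(src, m)) for m in INCLUDE_DEFAULT):
        exclude_default.extend(EXCLUDE_LIBRARY_EXTRA)   # treat it as a library
    result = ["+<%s>" % p for p in include or ["*", ".*"]]
    result += ["-<%s>" % p for p in exclude or []]
    result += ["-<%s>" % p for p in exclude_default]
    result += ["+<%s>" % p for p in INCLUDE_DEFAULT]    # manifests always ship
    return result


print(compute_src_filters(os.getcwd()))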
@@ -94,7 +94,7 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
name = item
version = "*"
if "@" in item:
name, version = item.split("@", 2)
name, version = item.split("@", 1)
spec = self.pm.ensure_spec(name)
options = {"version": version.strip(), "optional": False}
if spec.owner:
@@ -203,6 +203,9 @@ class PlatformBase( # pylint: disable=too-many-instance-attributes,too-many-pub
elif "nobuild" in targets and opts.get("type") != "framework":
self.packages[name]["optional"] = True

def configure_debug_options(self, initial_debug_options, ide_data):
raise NotImplementedError

def get_lib_storages(self):
storages = {}
for opts in (self.frameworks or {}).values():

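The maxsplit change matters for package references whose version part itself contains an "@" (for example an scp-style Git reference): split("@", 2) can yield three fields and the two-name unpacking fails, while split("@", 1) keeps everything after the first "@" in the version. A tiny sketch (the package name is made up):

# Sketch: why maxsplit=1 is the safer choice here.
item = "framework-demo@git@github.com:owner/framework-demo.git"

name, version = item.split("@", 1)
print(name)      # "framework-demo"
print(version)   # "git@github.com:owner/framework-demo.git"

try:
    name, version = item.split("@", 2)
except ValueError as exc:
    print("maxsplit=2 breaks:", exc)   # too many values to unpack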
@@ -358,12 +358,6 @@ class ProjectConfigBase(object):
click.secho("Warning! %s" % warning, fg="yellow")
return True

def remove_option(self, section, option):
return self._parser.remove_option(section, option)

def remove_section(self, section):
return self._parser.remove_section(section)


class ProjectConfigDirsMixin(object):
def _get_core_dir(self, exists=False):

@@ -301,7 +301,11 @@ def on_command():
def on_exception(e):
skip_conditions = [
isinstance(e, cls)
for cls in (IOError, exception.ReturnErrorCode, exception.UserSideException,)
for cls in (
IOError,
exception.ReturnErrorCode,
exception.UserSideException,
)
]
if any(skip_conditions):
return

@@ -19,7 +19,6 @@ import math
import os
import platform
import re
import sys
import time
from functools import wraps
from glob import glob
@@ -167,12 +166,9 @@ def get_mdns_services():
try:
import zeroconf
except ImportError:
from site import addsitedir
from platformio.package.manager.core import get_core_package_dir
from platformio.package.manager.core import inject_contrib_pysite

contrib_pysite_dir = get_core_package_dir("contrib-pysite")
addsitedir(contrib_pysite_dir)
sys.path.insert(0, contrib_pysite_dir)
inject_contrib_pysite()
import zeroconf # pylint: disable=import-outside-toplevel

class mDNSListener(object):

@@ -1066,6 +1066,8 @@ def update_project_examples():
# Frameworks
frameworks = []
for framework in API_FRAMEWORKS:
if framework["name"] not in framework_examples_md_lines:
continue
readme_dir = join(project_examples_dir, "frameworks", framework["name"])
if not isdir(readme_dir):
os.makedirs(readme_dir)

setup.py
@@ -52,7 +52,7 @@ setup(
[">=2.7", "!=3.0.*", "!=3.1.*", "!=3.2.*", "!=3.3.*", "!=3.4.*"]
),
install_requires=install_requires,
packages=find_packages() + ["scripts"],
packages=find_packages(exclude=["tests.*", "tests"]) + ["scripts"],
package_data={
"platformio": [
"ide/tpls/*/.*.tpl",

@ -100,14 +100,21 @@ def test_account_register(
|
||||
|
||||
|
||||
def test_account_login(
|
||||
clirunner, validate_cliresult, isolated_pio_core,
|
||||
clirunner,
|
||||
validate_cliresult,
|
||||
isolated_pio_core,
|
||||
):
|
||||
result = clirunner.invoke(cmd_account, ["login", "-u", username, "-p", password],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
["login", "-u", username, "-p", password],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
def test_account_summary(
|
||||
clirunner, validate_cliresult, isolated_pio_core,
|
||||
clirunner,
|
||||
validate_cliresult,
|
||||
isolated_pio_core,
|
||||
):
|
||||
result = clirunner.invoke(cmd_account, ["show", "--json-output", "--offline"])
|
||||
validate_cliresult(result)
|
||||
@ -160,13 +167,21 @@ def test_account_summary(
|
||||
|
||||
|
||||
def test_account_token(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(cmd_account, ["token", "--password", password,],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
[
|
||||
"token",
|
||||
"--password",
|
||||
password,
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "Personal Authentication Token:" in result.output
|
||||
token = result.output.strip().split(": ")[-1]
|
||||
|
||||
result = clirunner.invoke(
|
||||
cmd_account, ["token", "--password", password, "--json-output"],
|
||||
cmd_account,
|
||||
["token", "--password", password, "--json-output"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
json_result = json.loads(result.output.strip())
|
||||
@ -177,7 +192,14 @@ def test_account_token(clirunner, validate_cliresult, isolated_pio_core):
|
||||
|
||||
clirunner.invoke(cmd_account, ["logout"])
|
||||
|
||||
result = clirunner.invoke(cmd_account, ["token", "--password", password,],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
[
|
||||
"token",
|
||||
"--password",
|
||||
password,
|
||||
],
|
||||
)
|
||||
assert result.exit_code > 0
|
||||
assert result.exception
|
||||
assert "You are not authorized! Please log in to PIO Account" in str(
|
||||
@ -187,7 +209,8 @@ def test_account_token(clirunner, validate_cliresult, isolated_pio_core):
|
||||
os.environ["PLATFORMIO_AUTH_TOKEN"] = token
|
||||
|
||||
result = clirunner.invoke(
|
||||
cmd_account, ["token", "--password", password, "--json-output"],
|
||||
cmd_account,
|
||||
["token", "--password", password, "--json-output"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
json_result = json.loads(result.output.strip())
|
||||
@ -197,7 +220,10 @@ def test_account_token(clirunner, validate_cliresult, isolated_pio_core):
|
||||
|
||||
os.environ.pop("PLATFORMIO_AUTH_TOKEN")
|
||||
|
||||
result = clirunner.invoke(cmd_account, ["login", "-u", username, "-p", password],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
["login", "-u", username, "-p", password],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
@ -205,7 +231,13 @@ def test_account_change_password(clirunner, validate_cliresult, isolated_pio_cor
|
||||
new_password = "Testpassword123"
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
["password", "--old-password", password, "--new-password", new_password,],
|
||||
[
|
||||
"password",
|
||||
"--old-password",
|
||||
password,
|
||||
"--new-password",
|
||||
new_password,
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert "Password successfully changed!" in result.output
|
||||
@ -213,13 +245,20 @@ def test_account_change_password(clirunner, validate_cliresult, isolated_pio_cor
|
||||
clirunner.invoke(cmd_account, ["logout"])
|
||||
|
||||
result = clirunner.invoke(
|
||||
cmd_account, ["login", "-u", username, "-p", new_password],
|
||||
cmd_account,
|
||||
["login", "-u", username, "-p", new_password],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
["password", "--old-password", new_password, "--new-password", password,],
|
||||
[
|
||||
"password",
|
||||
"--old-password",
|
||||
new_password,
|
||||
"--new-password",
|
||||
password,
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
@ -272,14 +311,20 @@ def test_account_update(
|
||||
link = link.replace("&", "&")
|
||||
session.get(link)
|
||||
|
||||
result = clirunner.invoke(cmd_account, ["show"],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
["show"],
|
||||
)
|
||||
assert result.exit_code > 0
|
||||
assert result.exception
|
||||
assert "You are not authorized! Please log in to PIO Account" in str(
|
||||
result.exception
|
||||
)
|
||||
|
||||
result = clirunner.invoke(cmd_account, ["login", "-u", username, "-p", password],)
|
||||
result = clirunner.invoke(
|
||||
cmd_account,
|
||||
["login", "-u", username, "-p", password],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
@ -317,7 +362,8 @@ def test_account_update(
|
||||
|
||||
def test_org_create(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cmd_org, ["create", "--email", email, "--displayname", display_name, orgname],
|
||||
cmd_org,
|
||||
["create", "--email", email, "--displayname", display_name, orgname],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
@ -405,13 +451,21 @@ def test_org_update(clirunner, validate_cliresult, isolated_pio_core):
|
||||
def test_team_create(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cmd_team,
|
||||
["create", "%s:%s" % (orgname, teamname), "--description", team_description,],
|
||||
[
|
||||
"create",
|
||||
"%s:%s" % (orgname, teamname),
|
||||
"--description",
|
||||
team_description,
|
||||
],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
|
||||
def test_team_list(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
|
||||
result = clirunner.invoke(
|
||||
cmd_team,
|
||||
["list", "%s" % orgname, "--json-output"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
json_result = json.loads(result.output.strip())
|
||||
for item in json_result:
|
||||
@ -423,22 +477,30 @@ def test_team_list(clirunner, validate_cliresult, isolated_pio_core):
|
||||
|
||||
def test_team_add_member(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cmd_team, ["add", "%s:%s" % (orgname, teamname), second_username],
|
||||
cmd_team,
|
||||
["add", "%s:%s" % (orgname, teamname), second_username],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
|
||||
result = clirunner.invoke(
|
||||
cmd_team,
|
||||
["list", "%s" % orgname, "--json-output"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert second_username in result.output
|
||||
|
||||
|
||||
def test_team_remove(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cmd_team, ["remove", "%s:%s" % (orgname, teamname), second_username],
|
||||
cmd_team,
|
||||
["remove", "%s:%s" % (orgname, teamname), second_username],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
|
||||
result = clirunner.invoke(
|
||||
cmd_team,
|
||||
["list", "%s" % orgname, "--json-output"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
assert second_username not in result.output
|
||||
|
||||
@ -459,7 +521,10 @@ def test_team_update(clirunner, validate_cliresult, receive_email, isolated_pio_
|
||||
)
|
||||
validate_cliresult(result)
|
||||
|
||||
result = clirunner.invoke(cmd_team, ["list", "%s" % orgname, "--json-output"],)
|
||||
result = clirunner.invoke(
|
||||
cmd_team,
|
||||
["list", "%s" % orgname, "--json-output"],
|
||||
)
|
||||
validate_cliresult(result)
|
||||
json_result = json.loads(result.output.strip())
|
||||
for item in json_result:
|
||||
|
@ -446,7 +446,10 @@ int main() {
|
||||
result = clirunner.invoke(cmd_check, ["--project-dir", str(tmpdir)])
|
||||
validate_cliresult(result)
|
||||
defects = sum(count_defects(result.output))
|
||||
assert defects > 0, "Failed %s with %s" % (framework, tool,)
|
||||
assert defects > 0, "Failed %s with %s" % (
|
||||
framework,
|
||||
tool,
|
||||
)
|
||||
|
||||
|
||||
def test_check_skip_includes_from_packages(clirunner, validate_cliresult, tmpdir):
|
||||
|
@ -339,14 +339,17 @@ def test_lib_stats(clirunner, validate_cliresult):
|
||||
|
||||
result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])
|
||||
validate_cliresult(result)
|
||||
assert set(
|
||||
[
|
||||
"dlweek",
|
||||
"added",
|
||||
"updated",
|
||||
"topkeywords",
|
||||
"dlmonth",
|
||||
"dlday",
|
||||
"lastkeywords",
|
||||
]
|
||||
) == set(json.loads(result.output).keys())
|
||||
assert (
|
||||
set(
|
||||
[
|
||||
"dlweek",
|
||||
"added",
|
||||
"updated",
|
||||
"topkeywords",
|
||||
"dlmonth",
|
||||
"dlday",
|
||||
"lastkeywords",
|
||||
]
|
||||
)
|
||||
== set(json.loads(result.output).keys())
|
||||
)
|
||||
|
@ -60,7 +60,8 @@ def test_install_unknown_from_registry(clirunner):
|
||||
|
||||
def test_install_core_3_dev_platform(clirunner, validate_cliresult, isolated_pio_core):
|
||||
result = clirunner.invoke(
|
||||
cli_platform.platform_install, ["atmelavr@1.2.0", "--skip-default-package"],
|
||||
cli_platform.platform_install,
|
||||
["atmelavr@1.2.0", "--skip-default-package"],
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
@ -77,7 +77,8 @@ void loop() {}
|
||||
)
|
||||
|
||||
result = clirunner.invoke(
|
||||
cmd_test, ["-d", str(project_dir), "--without-testing", "--without-uploading"],
|
||||
cmd_test,
|
||||
["-d", str(project_dir), "--without-testing", "--without-uploading"],
|
||||
)
|
||||
|
||||
validate_cliresult(result)
|
||||
@ -127,7 +128,8 @@ int main() {
|
||||
)
|
||||
|
||||
native_result = clirunner.invoke(
|
||||
cmd_test, ["-d", str(project_dir), "-e", "native"],
|
||||
cmd_test,
|
||||
["-d", str(project_dir), "-e", "native"],
|
||||
)
|
||||
|
||||
test_dir.join("unittest_transport.h").write(
|
||||
|
@ -21,6 +21,7 @@ import pytest
|
||||
import semantic_version
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.compat import PY2
|
||||
from platformio.package.exception import (
|
||||
MissingPackageManifestError,
|
||||
UnknownPackageError,
|
||||
@ -144,6 +145,7 @@ def test_build_metadata(isolated_pio_core, tmpdir_factory):
|
||||
assert metadata.version.build[1] == vcs_revision
|
||||
|
||||
|
||||
@pytest.mark.skipif(PY2, reason="Requires Python 3.5 or higher")
|
||||
def test_install_from_url(isolated_pio_core, tmpdir_factory):
|
||||
tmp_dir = tmpdir_factory.mktemp("tmp")
|
||||
storage_dir = tmpdir_factory.mktemp("storage")
|
||||
@@ -230,6 +232,41 @@ def test_install_from_registry(isolated_pio_core, tmpdir_factory):
tm.install("owner/unknown-package-tool", silent=True)


def test_install_lib_depndencies(isolated_pio_core, tmpdir_factory):
tmp_dir = tmpdir_factory.mktemp("tmp")

src_dir = tmp_dir.join("lib-with-deps").mkdir()
root_dir = src_dir.mkdir("root")
root_dir.mkdir("src").join("main.cpp").write("#include <stdio.h>")
root_dir.join("library.json").write(
"""
{
"name": "lib-with-deps",
"version": "2.0.0",
"dependencies": [
{
"owner": "bblanchon",
"name": "ArduinoJson",
"version": "^6.16.1"
},
{
"name": "external-repo",
"version": "https://github.com/milesburton/Arduino-Temperature-Control-Library.git#4a0ccc1"
}
]
}
"""
)

lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
lm.install("file://%s" % str(src_dir), silent=True)
installed = lm.get_installed()
assert len(installed) == 4
assert set(["external-repo", "ArduinoJson", "lib-with-deps", "OneWire"]) == set(
p.metadata.name for p in installed
)


def test_install_force(isolated_pio_core, tmpdir_factory):
lm = LibraryPackageManager(str(tmpdir_factory.mktemp("lib-storage")))
# install #64 ArduinoJson

@ -44,7 +44,7 @@ def test_library_json_parser():
|
||||
"dependencies": {
|
||||
"deps1": "1.2.0",
|
||||
"deps2": "https://github.com/username/package.git",
|
||||
"@owner/deps3": "^2.1.3"
|
||||
"owner/deps3": "^2.1.3"
|
||||
},
|
||||
"customField": "Custom Value"
|
||||
}
|
||||
@ -65,9 +65,9 @@ def test_library_json_parser():
|
||||
"homepage": "http://old.url.format",
|
||||
"build": {"flags": ["-DHELLO"]},
|
||||
"dependencies": [
|
||||
{"name": "@owner/deps3", "version": "^2.1.3"},
|
||||
{"name": "deps1", "version": "1.2.0"},
|
||||
{"name": "deps2", "version": "https://github.com/username/package.git"},
|
||||
{"owner": "owner", "name": "deps3", "version": "^2.1.3"},
|
||||
],
|
||||
"customField": "Custom Value",
|
||||
},
|
||||
@ -83,7 +83,7 @@ def test_library_json_parser():
|
||||
},
|
||||
"dependencies": [
|
||||
{"name": "deps1", "version": "1.0.0"},
|
||||
{"name": "@owner/deps2", "version": "1.0.0", "platforms": "*", "frameworks": "arduino, espidf"},
|
||||
{"owner": "owner", "name": "deps2", "version": "1.0.0", "platforms": "*", "frameworks": "arduino, espidf"},
|
||||
{"name": "deps3", "version": "1.0.0", "platforms": ["ststm32", "sifive"]}
|
||||
]
|
||||
}
|
||||
@ -98,13 +98,14 @@ def test_library_json_parser():
|
||||
"export": {"exclude": ["audio_samples"]},
|
||||
"platforms": ["atmelavr"],
|
||||
"dependencies": [
|
||||
{"name": "deps1", "version": "1.0.0"},
|
||||
{
|
||||
"name": "@owner/deps2",
|
||||
"owner": "owner",
|
||||
"name": "deps2",
|
||||
"version": "1.0.0",
|
||||
"platforms": ["*"],
|
||||
"frameworks": ["arduino", "espidf"],
|
||||
},
|
||||
{"name": "deps1", "version": "1.0.0"},
|
||||
{
|
||||
"name": "deps3",
|
||||
"version": "1.0.0",
|
||||
@ -115,16 +116,16 @@ def test_library_json_parser():
|
||||
)
|
||||
|
||||
raw_data = parser.LibraryJsonManifestParser(
|
||||
'{"dependencies": ["dep1", "dep2", "@owner/dep3"]}'
|
||||
'{"dependencies": ["dep1", "dep2", "owner/dep3@1.2.3"]}'
|
||||
).as_dict()
|
||||
raw_data["dependencies"] = sorted(raw_data["dependencies"], key=lambda a: a["name"])
|
||||
assert not jsondiff.diff(
|
||||
raw_data,
|
||||
{
|
||||
"dependencies": [
|
||||
{"name": "@owner/dep3"},
|
||||
{"name": "dep1"},
|
||||
{"name": "dep2"},
|
||||
{"name": "owner/dep3@1.2.3"},
|
||||
],
|
||||
},
|
||||
)
|
||||
|
@ -90,12 +90,13 @@ def test_spec_local_urls(tmpdir_factory):
|
||||
assert PackageSpec("file:///tmp/some-lib/") == PackageSpec(
|
||||
url="file:///tmp/some-lib/", name="some-lib"
|
||||
)
|
||||
assert PackageSpec("file:///tmp/foo.tar.gz@~2.3.0-beta.1") == PackageSpec(
|
||||
url="file:///tmp/foo.tar.gz", name="foo", requirements="~2.3.0-beta.1"
|
||||
# detached package
|
||||
assert PackageSpec("file:///tmp/some-lib@src-67e1043a673d2") == PackageSpec(
|
||||
url="file:///tmp/some-lib@src-67e1043a673d2", name="some-lib"
|
||||
)
|
||||
# detached folder with "@" symbol
|
||||
# detached folder without scheme
|
||||
pkg_dir = tmpdir_factory.mktemp("storage").join("detached@1.2.3").mkdir()
|
||||
assert PackageSpec("file://%s" % str(pkg_dir)) == PackageSpec(
|
||||
assert PackageSpec(str(pkg_dir)) == PackageSpec(
|
||||
name="detached", url="file://%s" % pkg_dir
|
||||
)
|
||||
|
||||
|
@ -19,10 +19,12 @@ import tarfile
|
||||
import pytest
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.compat import PY2, WINDOWS
|
||||
from platformio.package.exception import UnknownManifestError
|
||||
from platformio.package.pack import PackagePacker
|
||||
|
||||
pytestmark = pytest.mark.skipif(PY2, reason="Requires Python 3.5 or higher")
|
||||
|
||||
|
||||
def test_base(tmpdir_factory):
|
||||
pkg_dir = tmpdir_factory.mktemp("package")
|
||||
|
@ -12,10 +12,9 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import random
|
||||
from glob import glob
|
||||
from os import listdir, walk
|
||||
from os.path import basename, dirname, getsize, isdir, isfile, join, normpath
|
||||
|
||||
import pytest
|
||||
|
||||
@ -32,24 +31,26 @@ def pytest_generate_tests(metafunc):
|
||||
examples_dirs = []
|
||||
|
||||
# repo examples
|
||||
examples_dirs.append(normpath(join(dirname(__file__), "..", "examples")))
|
||||
examples_dirs.append(
|
||||
os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "examples"))
|
||||
)
|
||||
|
||||
# dev/platforms
|
||||
for pkg in PlatformPackageManager().get_installed():
|
||||
p = PlatformFactory.new(pkg)
|
||||
examples_dir = join(p.get_dir(), "examples")
|
||||
assert isdir(examples_dir)
|
||||
examples_dirs.append(examples_dir)
|
||||
examples_dir = os.path.join(p.get_dir(), "examples")
|
||||
if os.path.isdir(examples_dir):
|
||||
examples_dirs.append(examples_dir)
|
||||
|
||||
project_dirs = []
|
||||
for examples_dir in examples_dirs:
|
||||
candidates = {}
|
||||
for root, _, files in walk(examples_dir):
|
||||
for root, _, files in os.walk(examples_dir):
|
||||
if "platformio.ini" not in files or ".skiptest" in files:
|
||||
continue
|
||||
if "zephyr-" in root and PY2:
|
||||
continue
|
||||
group = basename(root)
|
||||
group = os.path.basename(root)
|
||||
if "-" in group:
|
||||
group = group.split("-", 1)[0]
|
||||
if group not in candidates:
|
||||
@ -67,7 +68,7 @@ def test_run(pioproject_dir):
|
||||
with fs.cd(pioproject_dir):
|
||||
config = ProjectConfig()
|
||||
build_dir = config.get_optional_dir("build")
|
||||
if isdir(build_dir):
|
||||
if os.path.isdir(build_dir):
|
||||
fs.rmtree(build_dir)
|
||||
|
||||
env_names = config.envs()
|
||||
@ -77,18 +78,18 @@ def test_run(pioproject_dir):
|
||||
if result["returncode"] != 0:
|
||||
pytest.fail(str(result))
|
||||
|
||||
assert isdir(build_dir)
|
||||
assert os.path.isdir(build_dir)
|
||||
|
||||
# check .elf file
|
||||
for item in listdir(build_dir):
|
||||
if not isdir(item):
|
||||
for item in os.listdir(build_dir):
|
||||
if not os.path.isdir(item):
|
||||
continue
|
||||
assert isfile(join(build_dir, item, "firmware.elf"))
|
||||
assert os.path.isfile(os.path.join(build_dir, item, "firmware.elf"))
|
||||
# check .hex or .bin files
|
||||
firmwares = []
|
||||
for ext in ("bin", "hex"):
|
||||
firmwares += glob(join(build_dir, item, "firmware*.%s" % ext))
|
||||
firmwares += glob(os.path.join(build_dir, item, "firmware*.%s" % ext))
|
||||
if not firmwares:
|
||||
pytest.fail("Missed firmware file")
|
||||
for firmware in firmwares:
|
||||
assert getsize(firmware) > 0
|
||||
assert os.path.getsize(firmware) > 0